mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-30 13:51:31 +00:00
Merge #3220

3220: Fix clippy warnings, update Cargo.toml versions r=matklad a=SomeoneToIgnore

In the `cargo xtask lint` output, there were two Clippy warnings that might be interesting to investigate further:

* warning: this argument (4 byte) is passed by reference, but would be more efficient if passed by value (limit: 8 byte)
* warning: large size difference between variants

Co-authored-by: Kirill Bulatov <mail4score@gmail.com>
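Those two messages come from Clippy's `trivially_copy_pass_by_ref` and `large_enum_variant` lints. A minimal, self-contained sketch that triggers both (illustrative only, not code from this PR; the `Id` and `Message` types are invented for demonstration):

```rust
// Triggers clippy::trivially_copy_pass_by_ref: `Id` is a 4-byte `Copy`
// type, so `&Id` (a pointer, 8 bytes on 64-bit targets) is more expensive
// to pass around than the value itself.
#[derive(Clone, Copy)]
struct Id(u32);

fn lookup(id: &Id) -> u32 {
    // Clippy suggests changing the signature to `fn lookup(id: Id)`.
    id.0
}

// Triggers clippy::large_enum_variant: every `Message` occupies roughly as
// much space as its largest variant, so the 1 KiB payload inflates `Ping`
// too. Clippy suggests boxing the large variant (`Bulk(Box<[u8; 1024]>)`).
enum Message {
    Ping,
    Bulk([u8; 1024]),
}

fn main() {
    let id = Id(7);
    println!("{}", lookup(&id));
    let _messages = [Message::Ping, Message::Bulk([0; 1024])];
}
```

Passing the small `Copy` type by value and boxing the oversized variant are the usual fixes for these two lints.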
This commit is contained in: cecf25b72f

88 changed files with 381 additions and 391 deletions
Cargo.lock (generated): 20 lines changed
@@ -1043,6 +1043,7 @@ version = "0.1.0"
 dependencies = [
  "either",
  "format-buf",
+ "fst",
  "indexmap",
  "insta",
  "itertools",
@@ -1059,8 +1060,11 @@ dependencies = [
  "ra_syntax",
  "ra_text_edit",
  "rand",
+ "rayon",
  "rustc-hash",
+ "superslice",
  "test_utils",
+ "unicase",
 ]

 [[package]]
@@ -1087,6 +1091,7 @@ dependencies = [
  "rustc-hash",
  "superslice",
  "test_utils",
+ "unicase",
 ]

 [[package]]
@@ -1597,6 +1602,15 @@ dependencies = [
  "num_cpus",
 ]

+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
 [[package]]
 name = "unicode-bidi"
 version = "0.3.4"
@@ -1639,6 +1653,12 @@ dependencies = [
  "serde",
 ]

+[[package]]
+name = "version_check"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
+
 [[package]]
 name = "walkdir"
 version = "2.3.1"
@@ -10,8 +10,8 @@ doctest = false
 [dependencies]
 format-buf = "1.0.0"
 join_to_string = "0.1.3"
-rustc-hash = "1.0"
-either = "1.5"
+rustc-hash = "1.1.0"
+either = "1.5.3"

 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
@@ -43,9 +43,9 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
         .clone();

     let trait_token =
-        ctx.token_at_offset().filter(|t| t.kind() == IDENT && *t.text() != attr_name).next()?;
+        ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;

-    let annotated = attr.syntax().siblings(Direction::Next).find_map(|s| ast::Name::cast(s))?;
+    let annotated = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
     let annotated_name = annotated.syntax().text().to_string();
     let start_offset = annotated.syntax().parent()?.text_range().end();
@@ -62,7 +62,7 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
         .filter_map(|t| t.into_token().map(|t| t.text().clone()))
         .filter(|t| t != trait_token.text())
         .collect::<Vec<SmolStr>>();
-    let has_more_derives = new_attr_input.len() > 0;
+    let has_more_derives = !new_attr_input.is_empty();
     let new_attr_input =
         join(new_attr_input.iter()).separator(", ").surround_with("(", ")").to_string();
     let new_attr_input_len = new_attr_input.len();
@@ -86,7 +86,7 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
                 .next_sibling_or_token()
                 .filter(|t| t.kind() == WHITESPACE)
                 .map(|t| t.text_range())
-                .unwrap_or(TextRange::from_to(TextUnit::from(0), TextUnit::from(0)));
+                .unwrap_or_else(|| TextRange::from_to(TextUnit::from(0), TextUnit::from(0)));
             edit.delete(line_break_range);

             attr_range.len() + line_break_range.len()
@@ -53,7 +53,7 @@ pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> {
         }

         let vis = strukt.visibility().map(|v| format!("{} ", v.syntax()));
-        let vis = vis.as_ref().map(String::as_str).unwrap_or("");
+        let vis = vis.as_deref().unwrap_or("");
         write!(&mut buf, " {}fn new(", vis).unwrap();

         join(field_list.fields().filter_map(|f| {
@@ -44,7 +44,7 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> {
         edit.target(guard.syntax().text_range());
         let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
             Some(tok) => {
-                if let Some(_) = ast::Whitespace::cast(tok.clone()) {
+                if ast::Whitespace::cast(tok.clone()).is_some() {
                     let ele = tok.text_range();
                     edit.delete(ele);
                     ele.len()
@@ -98,11 +98,11 @@ pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
     let then_block = if_expr.then_branch()?;

     // Not support if with else branch
-    if let Some(_) = if_expr.else_branch() {
+    if if_expr.else_branch().is_some() {
         return None;
     }
     // Not support moving if let to arm guard
-    if let Some(_) = cond.pat() {
+    if cond.pat().is_some() {
         return None;
     }
@@ -61,7 +61,7 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {

         edit.target(if_expr.syntax().text_range());
         edit.set_cursor(if_expr.syntax().text_range().start());
-        edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr.into());
+        edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr);
     })
 }
@@ -38,8 +38,8 @@ pub struct GroupLabel(pub String);
 impl AssistLabel {
     pub(crate) fn new(label: String, id: AssistId) -> AssistLabel {
         // FIXME: make fields private, so that this invariant can't be broken
-        assert!(label.chars().nth(0).unwrap().is_uppercase());
-        AssistLabel { label: label.into(), id }
+        assert!(label.chars().next().unwrap().is_uppercase());
+        AssistLabel { label, id }
     }
 }
@@ -5,14 +5,14 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]

 [dependencies]
-crossbeam-channel = "0.4"
-lsp-types = { version = "0.70.0", features = ["proposed"] }
-log = "0.4.3"
+crossbeam-channel = "0.4.0"
+lsp-types = { version = "0.70.1", features = ["proposed"] }
+log = "0.4.8"
 cargo_metadata = "0.9.1"
 jod-thread = "0.1.0"
 parking_lot = "0.10.0"
-serde_json = "1.0.45"
+serde_json = "1.0.48"

 [dev-dependencies]
-insta = "0.13.0"
-serde_json = "1.0"
+insta = "0.13.1"
+serde_json = "1.0.48"
@@ -234,7 +234,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
         let child = map_rust_child_diagnostic(&child, workspace_root);
         match child {
             MappedRustChildDiagnostic::Related(related) => related_information.push(related),
-            MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action.into()),
+            MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action),
             MappedRustChildDiagnostic::MessageLine(message_line) => {
                 write!(&mut message, "\n{}", message_line).unwrap();
@@ -8,7 +8,7 @@ authors = ["rust-analyzer developers"]
 doctest = false

 [dependencies]
-rustc-hash = "1.0.1"
+rustc-hash = "1.1.0"

 ra_syntax = { path = "../ra_syntax" }
 tt = { path = "../ra_tt", package = "ra_tt" }
@@ -10,7 +10,7 @@ doctest = false
 [dependencies]
 salsa = "0.14.1"
 relative-path = "1.0.0"
-rustc-hash = "1.0"
+rustc-hash = "1.1.0"

 ra_syntax = { path = "../ra_syntax" }
 ra_cfg = { path = "../ra_cfg" }
@@ -249,7 +249,7 @@ impl FromStr for Edition {
         let res = match s {
             "2015" => Edition::Edition2015,
             "2018" => Edition::Edition2018,
-            _ => Err(ParseEditionError { invalid_input: s.to_string() })?,
+            _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
         };
         Ok(res)
     }
@@ -9,6 +9,6 @@ publish = false
 doctest = false

 [dependencies]
-itertools = "0.8.0"
+itertools = "0.8.2"

 ra_syntax = { path = "../ra_syntax" }
@@ -8,9 +8,9 @@ authors = ["rust-analyzer developers"]
 doctest = false

 [dependencies]
-log = "0.4.5"
-rustc-hash = "1.0"
-either = "1.5"
+log = "0.4.8"
+rustc-hash = "1.1.0"
+either = "1.5.3"

 ra_syntax = { path = "../ra_syntax" }
 ra_db = { path = "../ra_db" }
@@ -283,7 +283,7 @@ impl StructField {
         };
         let substs = Substs::type_params(db, generic_def_id);
         let ty = db.field_types(var_id)[self.id].clone().subst(&substs);
-        Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty)
+        Type::new(db, self.parent.module(db).id.krate, var_id, ty)
     }

     pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef {
@@ -315,11 +315,11 @@ impl Struct {
     }

     pub fn name(self, db: &impl DefDatabase) -> Name {
-        db.struct_data(self.id.into()).name.clone()
+        db.struct_data(self.id).name.clone()
     }

     pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> {
-        db.struct_data(self.id.into())
+        db.struct_data(self.id)
             .variant_data
             .fields()
             .iter()
@@ -332,7 +332,7 @@ impl Struct {
     }

     fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
-        db.struct_data(self.id.into()).variant_data.clone()
+        db.struct_data(self.id).variant_data.clone()
     }
 }
@@ -988,20 +988,17 @@ impl Type {

     pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> {
         if let Ty::Apply(a_ty) = &self.ty.value {
-            match a_ty.ctor {
-                TypeCtor::Adt(AdtId::StructId(s)) => {
-                    let var_def = s.into();
-                    return db
-                        .field_types(var_def)
-                        .iter()
-                        .map(|(local_id, ty)| {
-                            let def = StructField { parent: var_def.into(), id: local_id };
-                            let ty = ty.clone().subst(&a_ty.parameters);
-                            (def, self.derived(ty))
-                        })
-                        .collect();
-                }
-                _ => {}
+            if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor {
+                let var_def = s.into();
+                return db
+                    .field_types(var_def)
+                    .iter()
+                    .map(|(local_id, ty)| {
+                        let def = StructField { parent: var_def.into(), id: local_id };
+                        let ty = ty.clone().subst(&a_ty.parameters);
+                        (def, self.derived(ty))
+                    })
+                    .collect();
             }
         };
         Vec::new()
@@ -1010,14 +1007,11 @@ impl Type {
     pub fn tuple_fields(&self, _db: &impl HirDatabase) -> Vec<Type> {
         let mut res = Vec::new();
        if let Ty::Apply(a_ty) = &self.ty.value {
-            match a_ty.ctor {
-                TypeCtor::Tuple { .. } => {
-                    for ty in a_ty.parameters.iter() {
-                        let ty = ty.clone();
-                        res.push(self.derived(ty));
-                    }
-                }
-                _ => {}
+            if let TypeCtor::Tuple { .. } = a_ty.ctor {
+                for ty in a_ty.parameters.iter() {
+                    let ty = ty.clone();
+                    res.push(self.derived(ty));
+                }
             }
         };
         res
@@ -1049,7 +1043,7 @@ impl Type {
         // FIXME check that?
         let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
         let environment = self.ty.environment.clone();
-        let ty = InEnvironment { value: canonical, environment: environment.clone() };
+        let ty = InEnvironment { value: canonical, environment };
         autoderef(db, Some(self.krate), ty)
             .map(|canonical| canonical.value)
             .map(move |ty| self.derived(ty))
@@ -361,9 +361,8 @@ impl SourceAnalyzer {
         db: &impl HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
     ) -> Option<Expansion> {
-        let macro_call_id = macro_call.as_call_id(db, |path| {
-            self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into())
-        })?;
+        let macro_call_id =
+            macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?;
         Some(Expansion { macro_call_id })
     }
 }
@@ -8,11 +8,11 @@ authors = ["rust-analyzer developers"]
 doctest = false

 [dependencies]
-log = "0.4.5"
-once_cell = "1.0.1"
-rustc-hash = "1.0"
-either = "1.5"
-anymap = "0.12"
+log = "0.4.8"
+once_cell = "1.3.1"
+rustc-hash = "1.1.0"
+either = "1.5.3"
+anymap = "0.12.1"
 drop_bomb = "0.1.4"
 itertools = "0.8.2"
@@ -27,4 +27,4 @@ ra_cfg = { path = "../ra_cfg" }
 tt = { path = "../ra_tt", package = "ra_tt" }

 [dev-dependencies]
-insta = "0.13.0"
+insta = "0.13.1"
@@ -448,7 +448,7 @@ where
             // FIXME expand to statements in statement position
             ast::Expr::MacroCall(e) => {
                 let macro_call = self.expander.to_source(AstPtr::new(&e));
-                match self.expander.enter_expand(self.db, e.clone()) {
+                match self.expander.enter_expand(self.db, e) {
                     Some((mark, expansion)) => {
                         self.source_map
                             .expansions
@@ -71,7 +71,7 @@ impl GenericParams {
         db: &impl DefDatabase,
         def: GenericDefId,
     ) -> Arc<GenericParams> {
-        let (params, _source_map) = GenericParams::new(db, def.into());
+        let (params, _source_map) = GenericParams::new(db, def);
         Arc::new(params)
     }
@@ -138,7 +138,7 @@ impl ItemScope {

     pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool {
         let mut changed = false;
-        let existing = self.visible.entry(name.clone()).or_default();
+        let existing = self.visible.entry(name).or_default();

         if existing.types.is_none() && def.types.is_some() {
             existing.types = def.types;
@@ -157,7 +157,7 @@ impl ItemScope {
     }

     pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a {
-        self.visible.iter().map(|(name, res)| (name.clone(), res.clone()))
+        self.visible.iter().map(|(name, res)| (name.clone(), *res))
     }

     pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> {
@@ -460,7 +460,7 @@ impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
         resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
     ) -> Option<MacroCallId> {
         let def = resolver(self.path.clone())?;
-        Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id.clone())))
+        Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id)))
     }
 }
@@ -471,6 +471,6 @@ impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
         resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
     ) -> Option<MacroCallId> {
         let def = resolver(self.path.clone())?;
-        Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id.clone())))
+        Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id)))
     }
 }
@@ -156,7 +156,7 @@ impl ModuleOrigin {
             ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => {
                 let file_id = *definition;
                 let sf = db.parse(file_id).tree();
-                return InFile::new(file_id.into(), ModuleSource::SourceFile(sf));
+                InFile::new(file_id.into(), ModuleSource::SourceFile(sf))
             }
             ModuleOrigin::Inline { definition } => {
                 InFile::new(definition.file_id, ModuleSource::Module(definition.to_node(db)))
@@ -357,9 +357,7 @@ impl RawItemsCollector {
         let visibility =
             RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene);
         let alias = extern_crate.alias().map(|a| {
-            a.name()
-                .map(|it| it.as_name())
-                .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a))
+            a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
         });
         let attrs = self.parse_attrs(&extern_crate);
         // FIXME: cfg_attr
@@ -116,7 +116,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
     let events = db.log_executed(|| {
         let crate_def_map = db.crate_def_map(krate);
         let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-        assert_eq!(module_data.scope.resolutions().collect::<Vec<_>>().len(), 1);
+        assert_eq!(module_data.scope.resolutions().count(), 1);
     });
     assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
 }
@@ -126,7 +126,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
     let events = db.log_executed(|| {
         let crate_def_map = db.crate_def_map(krate);
         let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
-        assert_eq!(module_data.scope.resolutions().collect::<Vec<_>>().len(), 1);
+        assert_eq!(module_data.scope.resolutions().count(), 1);
     });
     assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
 }
@@ -32,9 +32,7 @@ pub(crate) fn lower_use_tree(
         }
     } else {
         let alias = tree.alias().map(|a| {
-            a.name()
-                .map(|it| it.as_name())
-                .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a))
+            a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
         });
         let is_glob = tree.has_star();
         if let Some(ast_path) = tree.path() {
@@ -474,7 +474,7 @@ impl Scope {
                     f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_, Visibility::Public)));
                 });
                 m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| {
-                    f(name.clone(), ScopeDef::PerNs(PerNs::types(def.into(), Visibility::Public)));
+                    f(name.clone(), ScopeDef::PerNs(PerNs::types(def, Visibility::Public)));
                 });
                 if let Some(prelude) = m.crate_def_map.prelude {
                     let prelude_def_map = db.crate_def_map(prelude.krate);
@@ -499,10 +499,10 @@ impl Scope {
                 }
             }
             Scope::ImplBlockScope(i) => {
-                f(name![Self], ScopeDef::ImplSelfType((*i).into()));
+                f(name![Self], ScopeDef::ImplSelfType(*i));
             }
             Scope::AdtScope(i) => {
-                f(name![Self], ScopeDef::AdtSelfType((*i).into()));
+                f(name![Self], ScopeDef::AdtSelfType(*i));
             }
             Scope::ExprScope(scope) => {
                 scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| {
@@ -8,8 +8,8 @@ authors = ["rust-analyzer developers"]
 doctest = false

 [dependencies]
-log = "0.4.5"
-either = "1.5"
+log = "0.4.8"
+either = "1.5.3"

 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
@@ -235,7 +235,7 @@ mod tests {
         let (db, file_id) = TestDB::with_single_file(&s);
         let parsed = db.parse(file_id);
         let items: Vec<_> =
-            parsed.syntax_node().descendants().filter_map(|it| ast::ModuleItem::cast(it)).collect();
+            parsed.syntax_node().descendants().filter_map(ast::ModuleItem::cast).collect();

         let ast_id_map = db.ast_id_map(file_id.into());
@@ -155,14 +155,11 @@ fn compile_error_expand(
     tt: &tt::Subtree,
 ) -> Result<tt::Subtree, mbe::ExpandError> {
     if tt.count() == 1 {
-        match &tt.token_trees[0] {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => {
-                let s = it.text.as_str();
-                if s.contains(r#"""#) {
-                    return Ok(quote! { loop { #it }});
-                }
-            }
-            _ => {}
+        if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] {
+            let s = it.text.as_str();
+            if s.contains('"') {
+                return Ok(quote! { loop { #it }});
+            }
         };
     }
@@ -222,7 +219,7 @@ mod tests {
         let (db, file_id) = TestDB::with_single_file(&s);
         let parsed = db.parse(file_id);
         let macro_calls: Vec<_> =
-            parsed.syntax_node().descendants().filter_map(|it| ast::MacroCall::cast(it)).collect();
+            parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect();

         let ast_id_map = db.ast_id_map(file_id.into());
@@ -15,14 +15,13 @@ macro_rules! __quote {
     ( @SUBTREE $delim:ident $($tt:tt)* ) => {
         {
             let children = $crate::__quote!($($tt)*);
-            let subtree = tt::Subtree {
+            tt::Subtree {
                 delimiter: Some(tt::Delimiter {
                     kind: tt::DelimiterKind::$delim,
                     id: tt::TokenId::unspecified(),
                 }),
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
-            };
-            subtree
+            }
         }
     };
@@ -259,8 +258,7 @@ mod tests {
         // }
         let struct_name = mk_ident("Foo");
         let fields = [mk_ident("name"), mk_ident("id")];
-        let fields =
-            fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
+        let fields = fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees).flatten();

         let list = tt::Subtree {
             delimiter: Some(tt::Delimiter {
@@ -9,9 +9,9 @@ doctest = false

 [dependencies]
 arrayvec = "0.5.1"
-ena = "0.13"
-log = "0.4.5"
-rustc-hash = "1.0"
+ena = "0.13.1"
+log = "0.4.8"
+rustc-hash = "1.1.0"

 hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
 hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
@@ -28,4 +28,4 @@ chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f30
 lalrpop-intern = "0.15.1"

 [dev-dependencies]
-insta = "0.13.0"
+insta = "0.13.1"
@@ -40,7 +40,7 @@ impl Diagnostic for MissingFields {
         use std::fmt::Write;
         let mut message = String::from("Missing structure fields:\n");
         for field in &self.missed_fields {
-            write!(message, "- {}\n", field).unwrap();
+            writeln!(message, "- {}", field).unwrap();
         }
         message
     }
@@ -138,7 +138,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
             _ => return,
         };

-        if params.len() == 2 && &params[0] == &mismatch.actual {
+        if params.len() == 2 && params[0] == mismatch.actual {
             let (_, source_map) = db.body_with_source_map(self.func.into());

             if let Some(source_ptr) = source_map.expr_syntax(id) {
@@ -225,14 +225,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver),
             db,
             owner,
-            body: db.body(owner.into()),
+            body: db.body(owner),
             resolver,
         }
     }

     fn resolve_all(mut self) -> InferenceResult {
         // FIXME resolve obligations as well (use Guidance if necessary)
-        let mut result = mem::replace(&mut self.result, InferenceResult::default());
+        let mut result = std::mem::take(&mut self.result);
         for ty in result.type_of_expr.values_mut() {
             let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
             *ty = resolved;
@@ -261,7 +261,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
     }

     fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
-        self.result.assoc_resolutions.insert(id, item.into());
+        self.result.assoc_resolutions.insert(id, item);
     }

     fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
@@ -312,9 +312,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         for obligation in obligations {
             let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone());
             let canonicalized = self.canonicalizer().canonicalize_obligation(in_env);
-            let solution = self
-                .db
-                .trait_solve(self.resolver.krate().unwrap().into(), canonicalized.value.clone());
+            let solution =
+                self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone());

             match solution {
                 Some(Solution::Unique(substs)) => {
@@ -26,7 +26,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
     /// Note that it is only possible that one type are coerced to another.
     /// Coercing both types to another least upper bound type is not possible in rustc,
     /// which will simply result in "incompatible types" error.
-    pub(super) fn coerce_merge_branch<'t>(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
+    pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
         if self.coerce(ty1, ty2) {
             ty2.clone()
         } else if self.coerce(ty2, ty1) {
@@ -44,10 +44,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         resolver: &Resolver,
     ) -> FxHashMap<(TypeCtor, TypeCtor), usize> {
         let krate = resolver.krate().unwrap();
-        let impls = match db.lang_item(krate.into(), "coerce_unsized".into()) {
-            Some(LangItemTarget::TraitId(trait_)) => {
-                db.impls_for_trait(krate.into(), trait_.into())
-            }
+        let impls = match db.lang_item(krate, "coerce_unsized".into()) {
+            Some(LangItemTarget::TraitId(trait_)) => db.impls_for_trait(krate, trait_),
             _ => return FxHashMap::default(),
         };
@@ -254,15 +252,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         let unsize_generic_index = {
             let mut index = None;
             let mut multiple_param = false;
-            field_tys[last_field_id].value.walk(&mut |ty| match ty {
-                &Ty::Bound(idx) => {
+            field_tys[last_field_id].value.walk(&mut |ty| {
+                if let &Ty::Bound(idx) = ty {
                     if index.is_none() {
                         index = Some(idx);
                     } else if Some(idx) != index {
                         multiple_param = true;
                     }
                 }
-                _ => {}
             });

             if multiple_param {
@@ -35,8 +35,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
             );
         }
-        let ty = self.resolve_ty_as_possible(ty);
-        ty
+        self.resolve_ty_as_possible(ty)
     }

     /// Infer type of expression with possibly implicit coerce to the expected type.
@@ -127,10 +126,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 },
                     Substs(sig_tys.into()),
                 );
-                let closure_ty = Ty::apply_one(
-                    TypeCtor::Closure { def: self.owner.into(), expr: tgt_expr },
-                    sig_ty,
-                );
+                let closure_ty =
+                    Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);

                 // Eagerly try to relate the closure type with the expected
                 // type, otherwise we often won't have enough information to
@@ -157,15 +154,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 };
                 self.register_obligations_for_call(&callee_ty);
                 self.check_call_arguments(args, &param_tys);
-                let ret_ty = self.normalize_associated_types_in(ret_ty);
-                ret_ty
+                self.normalize_associated_types_in(ret_ty)
             }
             Expr::MethodCall { receiver, args, method_name, generic_args } => self
                 .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()),
             Expr::Match { expr, arms } => {
                 let input_ty = self.infer_expr(*expr, &Expectation::none());

-                let mut result_ty = if arms.len() == 0 {
+                let mut result_ty = if arms.is_empty() {
                     Ty::simple(TypeCtor::Never)
                 } else {
                     self.table.new_type_var()
@@ -188,7 +184,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             }
             Expr::Path(p) => {
                 // FIXME this could be more efficient...
-                let resolver = resolver_for_expr(self.db, self.owner.into(), tgt_expr);
+                let resolver = resolver_for_expr(self.db, self.owner, tgt_expr);
                 self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
             }
             Expr::Continue => Ty::simple(TypeCtor::Never),
@@ -217,8 +213,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 self.unify(&ty, &expected.ty);

                 let substs = ty.substs().unwrap_or_else(Substs::empty);
-                let field_types =
-                    def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default();
+                let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
                 let variant_data = def_id.map(|it| variant_data(self.db, it));
                 for (field_idx, field) in fields.iter().enumerate() {
                     let field_def =
|
@ -264,7 +259,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
.and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
|
.and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
|
||||||
TypeCtor::Adt(AdtId::StructId(s)) => {
|
TypeCtor::Adt(AdtId::StructId(s)) => {
|
||||||
self.db.struct_data(s).variant_data.field(name).map(|local_id| {
|
self.db.struct_data(s).variant_data.field(name).map(|local_id| {
|
||||||
let field = StructFieldId { parent: s.into(), local_id }.into();
|
let field = StructFieldId { parent: s.into(), local_id };
|
||||||
self.write_field_resolution(tgt_expr, field);
|
self.write_field_resolution(tgt_expr, field);
|
||||||
self.db.field_types(s.into())[field.local_id]
|
self.db.field_types(s.into())[field.local_id]
|
||||||
.clone()
|
.clone()
|
||||||
|
@ -283,14 +278,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
}
|
}
|
||||||
Expr::Await { expr } => {
|
Expr::Await { expr } => {
|
||||||
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
||||||
let ty =
|
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
|
||||||
self.resolve_associated_type(inner_ty, self.resolve_future_future_output());
|
|
||||||
ty
|
|
||||||
}
|
}
|
||||||
Expr::Try { expr } => {
|
Expr::Try { expr } => {
|
||||||
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
||||||
let ty = self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok());
|
self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
|
||||||
ty
|
|
||||||
}
|
}
|
||||||
Expr::Cast { expr, type_ref } => {
|
Expr::Cast { expr, type_ref } => {
|
||||||
let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
|
||||||
|
@ -614,8 +606,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
self.unify(&expected_receiver_ty, &actual_receiver_ty);
|
self.unify(&expected_receiver_ty, &actual_receiver_ty);
|
||||||
|
|
||||||
self.check_call_arguments(args, ¶m_tys);
|
self.check_call_arguments(args, ¶m_tys);
|
||||||
let ret_ty = self.normalize_associated_types_in(ret_ty);
|
self.normalize_associated_types_in(ret_ty)
|
||||||
ret_ty
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
|
fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
|
||||||
|
@ -700,10 +691,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
// construct a TraitDef
|
// construct a TraitDef
|
||||||
let substs =
|
let substs =
|
||||||
a_ty.parameters.prefix(generics(self.db, trait_.into()).len());
|
a_ty.parameters.prefix(generics(self.db, trait_.into()).len());
|
||||||
self.obligations.push(Obligation::Trait(TraitRef {
|
self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
|
||||||
trait_: trait_.into(),
|
|
||||||
substs,
|
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {}
|
CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {}
|
||||||
|
|
|
@ -28,7 +28,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
|
|
||||||
let substs = ty.substs().unwrap_or_else(Substs::empty);
|
let substs = ty.substs().unwrap_or_else(Substs::empty);
|
||||||
|
|
||||||
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
|
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
|
||||||
|
|
||||||
for (i, &subpat) in subpats.iter().enumerate() {
|
for (i, &subpat) in subpats.iter().enumerate() {
|
||||||
let expected_ty = var_data
|
let expected_ty = var_data
|
||||||
|
@ -60,7 +60,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
|
|
||||||
let substs = ty.substs().unwrap_or_else(Substs::empty);
|
let substs = ty.substs().unwrap_or_else(Substs::empty);
|
||||||
|
|
||||||
let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default();
|
let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
|
||||||
for subpat in subpats {
|
for subpat in subpats {
|
||||||
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
|
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
|
||||||
let expected_ty =
|
let expected_ty =
|
||||||
|
|
|
@ -104,8 +104,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
|
||||||
let segment =
|
let segment =
|
||||||
remaining_segments.last().expect("there should be at least one segment here");
|
remaining_segments.last().expect("there should be at least one segment here");
|
||||||
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
|
||||||
let trait_ref =
|
let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None);
|
||||||
TraitRef::from_resolved_path(&ctx, trait_.into(), resolved_segment, None);
|
|
||||||
self.resolve_trait_assoc_item(trait_ref, segment, id)
|
self.resolve_trait_assoc_item(trait_ref, segment, id)
|
||||||
}
|
}
|
||||||
(def, _) => {
|
(def, _) => {
|
||||||
|
@@ -144,30 +143,32 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         id: ExprOrPatId,
     ) -> Option<(ValueNs, Option<Substs>)> {
         let trait_ = trait_ref.trait_;
-        let item = self
-            .db
-            .trait_data(trait_)
-            .items
-            .iter()
-            .map(|(_name, id)| (*id).into())
-            .find_map(|item| match item {
-                AssocItemId::FunctionId(func) => {
-                    if segment.name == &self.db.function_data(func).name {
-                        Some(AssocItemId::FunctionId(func))
-                    } else {
-                        None
-                    }
-                }
-
-                AssocItemId::ConstId(konst) => {
-                    if self.db.const_data(konst).name.as_ref().map_or(false, |n| n == segment.name)
-                    {
-                        Some(AssocItemId::ConstId(konst))
-                    } else {
-                        None
-                    }
-                }
-                AssocItemId::TypeAliasId(_) => None,
+        let item =
+            self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
+                match item {
+                    AssocItemId::FunctionId(func) => {
+                        if segment.name == &self.db.function_data(func).name {
+                            Some(AssocItemId::FunctionId(func))
+                        } else {
+                            None
+                        }
+                    }
+
+                    AssocItemId::ConstId(konst) => {
+                        if self
+                            .db
+                            .const_data(konst)
+                            .name
+                            .as_ref()
+                            .map_or(false, |n| n == segment.name)
+                        {
+                            Some(AssocItemId::ConstId(konst))
+                        } else {
+                            None
+                        }
+                    }
+                    AssocItemId::TypeAliasId(_) => None,
+                }
             })?;
         let def = match item {
             AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
@@ -233,7 +234,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     AssocContainerId::ContainerId(_) => None,
                 };

-                self.write_assoc_resolution(id, item.into());
+                self.write_assoc_resolution(id, item);
                 Some((def, substs))
             },
         )
|
@ -140,13 +140,12 @@ where
|
||||||
impl<T> Canonicalized<T> {
|
impl<T> Canonicalized<T> {
|
||||||
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
|
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
|
||||||
ty.walk_mut_binders(
|
ty.walk_mut_binders(
|
||||||
&mut |ty, binders| match ty {
|
&mut |ty, binders| {
|
||||||
&mut Ty::Bound(idx) => {
|
if let &mut Ty::Bound(idx) = ty {
|
||||||
if idx as usize >= binders && (idx as usize - binders) < self.free_vars.len() {
|
if idx as usize >= binders && (idx as usize - binders) < self.free_vars.len() {
|
||||||
*ty = Ty::Infer(self.free_vars[idx as usize - binders]);
|
*ty = Ty::Infer(self.free_vars[idx as usize - binders]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {}
|
|
||||||
},
|
},
|
||||||
0,
|
0,
|
||||||
);
|
);
|
||||||
|
|
|
@ -167,7 +167,7 @@ impl TypeCtor {
|
||||||
| TypeCtor::Closure { .. } // 1 param representing the signature of the closure
|
| TypeCtor::Closure { .. } // 1 param representing the signature of the closure
|
||||||
=> 1,
|
=> 1,
|
||||||
TypeCtor::Adt(adt) => {
|
TypeCtor::Adt(adt) => {
|
||||||
let generic_params = generics(db, AdtId::from(adt).into());
|
let generic_params = generics(db, adt.into());
|
||||||
generic_params.len()
|
generic_params.len()
|
||||||
}
|
}
|
||||||
TypeCtor::FnDef(callable) => {
|
TypeCtor::FnDef(callable) => {
|
||||||
|
@ -247,7 +247,7 @@ pub struct ProjectionTy {
|
||||||
|
|
||||||
impl ProjectionTy {
|
impl ProjectionTy {
|
||||||
pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef {
|
pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef {
|
||||||
TraitRef { trait_: self.trait_(db).into(), substs: self.parameters.clone() }
|
TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn trait_(&self, db: &impl HirDatabase) -> TraitId {
|
fn trait_(&self, db: &impl HirDatabase) -> TraitId {
|
||||||
|
@ -763,8 +763,8 @@ pub trait TypeWalk {
|
||||||
Self: Sized,
|
Self: Sized,
|
||||||
{
|
{
|
||||||
self.walk_mut_binders(
|
self.walk_mut_binders(
|
||||||
&mut |ty, binders| match ty {
|
&mut |ty, binders| {
|
||||||
&mut Ty::Bound(idx) => {
|
if let &mut Ty::Bound(idx) = ty {
|
||||||
if idx as usize >= binders && (idx as usize - binders) < substs.len() {
|
if idx as usize >= binders && (idx as usize - binders) < substs.len() {
|
||||||
*ty = substs.0[idx as usize - binders].clone();
|
*ty = substs.0[idx as usize - binders].clone();
|
||||||
} else if idx as usize >= binders + substs.len() {
|
} else if idx as usize >= binders + substs.len() {
|
||||||
|
@ -772,7 +772,6 @@ pub trait TypeWalk {
|
||||||
*ty = Ty::Bound(idx - substs.len() as u32);
|
*ty = Ty::Bound(idx - substs.len() as u32);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => {}
|
|
||||||
},
|
},
|
||||||
0,
|
0,
|
||||||
);
|
);
|
||||||
|
|
|
@ -361,10 +361,8 @@ impl Ty {
|
||||||
for t in traits {
|
for t in traits {
|
||||||
if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name)
|
if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name)
|
||||||
{
|
{
|
||||||
let substs = Substs::build_for_def(ctx.db, t)
|
let substs =
|
||||||
.push(self_ty.clone())
|
Substs::build_for_def(ctx.db, t).push(self_ty).fill_with_unknown().build();
|
||||||
.fill_with_unknown()
|
|
||||||
.build();
|
|
||||||
// FIXME handle type parameters on the segment
|
// FIXME handle type parameters on the segment
|
||||||
return Ty::Projection(ProjectionTy { associated_ty, parameters: substs });
|
return Ty::Projection(ProjectionTy { associated_ty, parameters: substs });
|
||||||
}
|
}
|
||||||
|
@ -428,7 +426,7 @@ pub(super) fn substs_from_path_segment(
|
||||||
_add_self_param: bool,
|
_add_self_param: bool,
|
||||||
) -> Substs {
|
) -> Substs {
|
||||||
let mut substs = Vec::new();
|
let mut substs = Vec::new();
|
||||||
let def_generics = def_generic.map(|def| generics(ctx.db, def.into()));
|
let def_generics = def_generic.map(|def| generics(ctx.db, def));
|
||||||
|
|
||||||
let (parent_params, self_params, type_params, impl_trait_params) =
|
let (parent_params, self_params, type_params, impl_trait_params) =
|
||||||
def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
|
def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
|
||||||
|
@ -459,7 +457,7 @@ pub(super) fn substs_from_path_segment(
|
||||||
|
|
||||||
// handle defaults
|
// handle defaults
|
||||||
if let Some(def_generic) = def_generic {
|
if let Some(def_generic) = def_generic {
|
||||||
let default_substs = ctx.db.generic_defaults(def_generic.into());
|
let default_substs = ctx.db.generic_defaults(def_generic);
|
||||||
assert_eq!(substs.len(), default_substs.len());
|
assert_eq!(substs.len(), default_substs.len());
|
||||||
|
|
||||||
for (i, default_ty) in default_substs.iter().enumerate() {
|
for (i, default_ty) in default_substs.iter().enumerate() {
|
||||||
|
@ -483,7 +481,7 @@ impl TraitRef {
|
||||||
_ => return None,
|
_ => return None,
|
||||||
};
|
};
|
||||||
let segment = path.segments().last().expect("path should have at least one segment");
|
let segment = path.segments().last().expect("path should have at least one segment");
|
||||||
Some(TraitRef::from_resolved_path(ctx, resolved.into(), segment, explicit_self_ty))
|
Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn from_resolved_path(
|
pub(crate) fn from_resolved_path(
|
||||||
|
@ -728,7 +726,7 @@ pub(crate) fn generic_predicates_query(
|
||||||
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs {
|
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs {
|
||||||
let resolver = def.resolver(db);
|
let resolver = def.resolver(db);
|
||||||
let ctx = TyLoweringContext::new(db, &resolver);
|
let ctx = TyLoweringContext::new(db, &resolver);
|
||||||
let generic_params = generics(db, def.into());
|
let generic_params = generics(db, def);
|
||||||
|
|
||||||
let defaults = generic_params
|
let defaults = generic_params
|
||||||
.iter()
|
.iter()
|
||||||
|
@ -792,7 +790,7 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig {
|
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig {
|
||||||
let struct_data = db.struct_data(def.into());
|
let struct_data = db.struct_data(def);
|
||||||
let fields = struct_data.variant_data.fields();
|
let fields = struct_data.variant_data.fields();
|
||||||
let resolver = def.resolver(db);
|
let resolver = def.resolver(db);
|
||||||
let ctx =
|
let ctx =
|
||||||
|
@ -805,7 +803,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFn
|
||||||
|
|
||||||
/// Build the type of a tuple struct constructor.
|
/// Build the type of a tuple struct constructor.
|
||||||
fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> {
|
fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> {
|
||||||
let struct_data = db.struct_data(def.into());
|
let struct_data = db.struct_data(def);
|
||||||
if let StructKind::Unit = struct_data.variant_data.kind() {
|
if let StructKind::Unit = struct_data.variant_data.kind() {
|
||||||
return type_for_adt(db, def.into());
|
return type_for_adt(db, def.into());
|
||||||
}
|
}
|
||||||
|
@ -836,7 +834,7 @@ fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId)
|
||||||
}
|
}
|
||||||
let generics = generics(db, def.parent.into());
|
let generics = generics(db, def.parent.into());
|
||||||
let substs = Substs::bound_vars(&generics);
|
let substs = Substs::bound_vars(&generics);
|
||||||
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs))
|
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> {
|
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> {
|
||||||
|
@ -964,6 +962,6 @@ pub(crate) fn impl_trait_query(
|
||||||
let target_trait = impl_data.target_trait.as_ref()?;
|
let target_trait = impl_data.target_trait.as_ref()?;
|
||||||
Some(Binders::new(
|
Some(Binders::new(
|
||||||
self_ty.num_binders,
|
self_ty.num_binders,
|
||||||
TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value.clone()))?,
|
TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
|

@@ -214,7 +214,7 @@ pub fn iterate_method_candidates<T>(
 // the methods by autoderef order of *receiver types*, not *self
 // types*.

-let deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty.clone()).collect();
+let deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect();
 for i in 0..deref_chain.len() {
 if let Some(result) = iterate_method_candidates_with_autoref(
 &deref_chain[i..],

@@ -290,7 +290,7 @@ fn iterate_method_candidates_with_autoref<T>(
 &ref_muted,
 deref_chain,
 db,
-env.clone(),
+env,
 krate,
 &traits_in_scope,
 name,

@@ -391,17 +391,17 @@ fn iterate_trait_method_candidates<T>(
 // iteration
 let mut known_implemented = false;
 for (_name, item) in data.items.iter() {
-if !is_valid_candidate(db, name, receiver_ty, (*item).into(), self_ty) {
+if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) {
 continue;
 }
 if !known_implemented {
 let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone());
-if db.trait_solve(krate.into(), goal).is_none() {
+if db.trait_solve(krate, goal).is_none() {
 continue 'traits;
 }
 }
 known_implemented = true;
-if let Some(result) = callback(&self_ty.value, (*item).into()) {
+if let Some(result) = callback(&self_ty.value, *item) {
 return Some(result);
 }
 }

@@ -521,7 +521,7 @@ pub fn implements_trait(
 return true;
 }
 let goal = generic_implements_goal(db, env, trait_, ty.clone());
-let solution = db.trait_solve(krate.into(), goal);
+let solution = db.trait_solve(krate, goal);

 solution.is_some()
 }

@@ -30,20 +30,18 @@ pub(super) fn binary_op_return_ty(op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Ty {
 pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty {
 match op {
 BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool),
-BinaryOp::Assignment { op: None } | BinaryOp::CmpOp(CmpOp::Eq { negated: _ }) => {
-match lhs_ty {
-Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
-TypeCtor::Int(..)
-| TypeCtor::Float(..)
-| TypeCtor::Str
-| TypeCtor::Char
-| TypeCtor::Bool => lhs_ty,
-_ => Ty::Unknown,
-},
-Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
-_ => Ty::Unknown,
-}
-}
+BinaryOp::Assignment { op: None } | BinaryOp::CmpOp(CmpOp::Eq { .. }) => match lhs_ty {
+Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
+TypeCtor::Int(..)
+| TypeCtor::Float(..)
+| TypeCtor::Str
+| TypeCtor::Char
+| TypeCtor::Bool => lhs_ty,
+_ => Ty::Unknown,
+},
+Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
+_ => Ty::Unknown,
+},
 BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown,
 BinaryOp::CmpOp(CmpOp::Ord { .. })
 | BinaryOp::Assignment { op: Some(_) }

@@ -86,15 +86,14 @@ impl TestDB {
 pub fn diagnostics(&self) -> String {
 let mut buf = String::new();
 let crate_graph = self.crate_graph();
-for krate in crate_graph.iter().next() {
+for krate in crate_graph.iter() {
 let crate_def_map = self.crate_def_map(krate);

 let mut fns = Vec::new();
 for (module_id, _) in crate_def_map.modules.iter() {
 for decl in crate_def_map[module_id].scope.declarations() {
-match decl {
-ModuleDefId::FunctionId(f) => fns.push(f),
-_ => (),
+if let ModuleDefId::FunctionId(f) = decl {
+fns.push(f)
 }
 }
 }


@@ -101,9 +101,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
 (src_ptr.value.range(), node.text().to_string().replace("\n", " "))
 };
 let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
-write!(
+writeln!(
 acc,
-"{}{} '{}': {}\n",
+"{}{} '{}': {}",
 macro_prefix,
 range,
 ellipsize(text, 15),

@@ -118,9 +118,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
 for (src_ptr, mismatch) in &mismatches {
 let range = src_ptr.value.range();
 let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
-write!(
+writeln!(
 acc,
-"{}{}: expected {}, got {}\n",
+"{}{}: expected {}, got {}",
 macro_prefix,
 range,
 mismatch.expected.display(&db),

@@ -248,12 +248,9 @@ fn solution_from_chalk(
 let value = subst
 .value
 .into_iter()
-.map(|p| {
-let ty = match p.ty() {
-Some(ty) => from_chalk(db, ty.clone()),
-None => unimplemented!(),
-};
-ty
+.map(|p| match p.ty() {
+Some(ty) => from_chalk(db, ty.clone()),
+None => unimplemented!(),
 })
 .collect();
 let result = Canonical { value, num_vars: subst.binders.len() };

@@ -98,7 +98,7 @@ fn closure_fn_trait_impl_datum(
 // the existence of the Fn trait has been checked before
 .expect("fn trait for closure impl missing");

-let num_args: u16 = match &db.body(data.def.into())[data.expr] {
+let num_args: u16 = match &db.body(data.def)[data.expr] {
 Expr::Lambda { args, .. } => args.len() as u16,
 _ => {
 log::warn!("closure for closure type {:?} not found", data);

@@ -118,11 +118,11 @@ fn closure_fn_trait_impl_datum(
 let self_ty = Ty::apply_one(TypeCtor::Closure { def: data.def, expr: data.expr }, sig_ty);

 let trait_ref = TraitRef {
-trait_: trait_.into(),
+trait_,
 substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(),
 };

-let output_ty_id = AssocTyValue::ClosureFnTraitImplOutput(data.clone());
+let output_ty_id = AssocTyValue::ClosureFnTraitImplOutput(data);

 BuiltinImplData {
 num_vars: num_args as usize + 1,

@@ -137,9 +137,9 @@ fn closure_fn_trait_output_assoc_ty_value(
 krate: CrateId,
 data: super::ClosureFnTraitImplData,
 ) -> BuiltinImplAssocTyValueData {
-let impl_ = Impl::ClosureFnTraitImpl(data.clone());
+let impl_ = Impl::ClosureFnTraitImpl(data);

-let num_args: u16 = match &db.body(data.def.into())[data.expr] {
+let num_args: u16 = match &db.body(data.def)[data.expr] {
 Expr::Lambda { args, .. } => args.len() as u16,
 _ => {
 log::warn!("closure for closure type {:?} not found", data);

@@ -409,8 +409,7 @@ where
 fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
 let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT);
 let value = self.value.to_chalk(db);
-let canonical = chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] };
-canonical
+chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] }
 }

 fn from_chalk(db: &impl HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {

@@ -565,10 +564,10 @@ where
 // and will panic if the trait can't be resolved.
 let mut result: Vec<_> = self
 .db
-.impls_for_trait(self.krate, trait_.into())
+.impls_for_trait(self.krate, trait_)
 .iter()
 .copied()
-.map(|it| Impl::ImplBlock(it.into()))
+.map(Impl::ImplBlock)
 .map(|impl_| impl_.to_chalk(self.db))
 .collect();

@@ -586,7 +585,7 @@ where
 false // FIXME
 }
 fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
-self.db.associated_ty_value(self.krate.into(), id)
+self.db.associated_ty_value(self.krate, id)
 }
 fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<TypeFamily>> {
 vec![]

@@ -674,7 +673,7 @@ pub(crate) fn struct_datum_query(
 let where_clauses = type_ctor
 .as_generic_def()
 .map(|generic_def| {
-let generic_params = generics(db, generic_def.into());
+let generic_params = generics(db, generic_def);
 let bound_vars = Substs::bound_vars(&generic_params);
 convert_where_clauses(db, generic_def, &bound_vars)
 })

@@ -805,7 +804,7 @@ fn type_alias_associated_ty_value(
 let ty = db.ty(type_alias.into());
 let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) };
 let value = chalk_rust_ir::AssociatedTyValue {
-impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db),
+impl_id: Impl::ImplBlock(impl_id).to_chalk(db),
 associated_ty_id: assoc_ty.to_chalk(db),
 value: make_binders(value_bound, ty.num_binders),
 };

@@ -11,15 +11,19 @@ doctest = false
 wasm = []

 [dependencies]
-either = "1.5"
+either = "1.5.3"
 format-buf = "1.0.0"
-indexmap = "1.3.0"
-itertools = "0.8.0"
+indexmap = "1.3.2"
+itertools = "0.8.2"
 join_to_string = "0.1.3"
-log = "0.4.5"
-rustc-hash = "1.0"
-rand = { version = "0.7.0", features = ["small_rng"] }
-once_cell = "1.2.0"
+log = "0.4.8"
+rayon = "1.3.0"
+fst = { version = "0.3.5", default-features = false }
+rustc-hash = "1.1.0"
+unicase = "2.6.0"
+superslice = "1.0.0"
+rand = { version = "0.7.3", features = ["small_rng"] }
+once_cell = "1.3.1"

 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }

@@ -36,4 +40,4 @@ ra_assists = { path = "../ra_assists" }
 hir = { path = "../ra_hir", package = "ra_hir" }

 [dev-dependencies]
-insta = "0.13.0"
+insta = "0.13.1"

@@ -128,7 +128,7 @@ impl FnCallNode {
 }),

 FnCallNode::MethodCallExpr(call_expr) => {
-call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0)
+call_expr.syntax().children().filter_map(ast::NameRef::cast).next()
 }

 FnCallNode::MacroCallExpr(call_expr) => call_expr.path()?.segment()?.name_ref(),

@@ -59,7 +59,7 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
 .as_ref()
 .and_then(|node| node.parent())
 .and_then(|node| node.parent())
-.and_then(|node| ast::ImplBlock::cast(node));
+.and_then(ast::ImplBlock::cast);

 if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
 match trigger.kind() {

@@ -110,17 +110,17 @@ fn add_function_impl(
 ctx: &CompletionContext,
 func: &hir::Function,
 ) {
-let display = FunctionSignature::from_hir(ctx.db, func.clone());
+let display = FunctionSignature::from_hir(ctx.db, *func);

 let fn_name = func.name(ctx.db).to_string();

-let label = if func.params(ctx.db).len() > 0 {
+let label = if !func.params(ctx.db).is_empty() {
 format!("fn {}(..)", fn_name)
 } else {
 format!("fn {}()", fn_name)
 };

-let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label.clone())
+let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label)
 .lookup_by(fn_name)
 .set_documentation(func.docs(ctx.db));


@@ -159,7 +159,7 @@ impl CompletionItem {

 /// Short one-line additional information, like a type
 pub fn detail(&self) -> Option<&str> {
-self.detail.as_ref().map(|it| it.as_str())
+self.detail.as_deref()
 }
 /// A doc-comment
 pub fn documentation(&self) -> Option<Documentation> {

@@ -167,7 +167,7 @@ impl CompletionItem {
 }
 /// What string is used for filtering.
 pub fn lookup(&self) -> &str {
-self.lookup.as_ref().map(|it| it.as_str()).unwrap_or_else(|| self.label())
+self.lookup.as_deref().unwrap_or_else(|| self.label())
 }

 pub fn kind(&self) -> Option<CompletionItemKind> {

@@ -54,9 +54,8 @@ impl FunctionSignature {

 pub(crate) fn from_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> {
 let node: ast::StructDef = st.source(db).value;
-match node.kind() {
-ast::StructKind::Record(_) => return None,
-_ => (),
+if let ast::StructKind::Record(_) = node.kind() {
+return None;
 };

 let params = st

@@ -64,11 +64,11 @@ impl NavigationTarget {
 }

 pub fn docs(&self) -> Option<&str> {
-self.docs.as_ref().map(String::as_str)
+self.docs.as_deref()
 }

 pub fn description(&self) -> Option<&str> {
-self.description.as_ref().map(String::as_str)
+self.description.as_deref()
 }

 /// A "most interesting" range withing the `full_range`.

@@ -268,7 +268,7 @@ fn decl_access(
 };

 let stmt = find_node_at_offset::<ast::LetStmt>(syntax, range.start())?;
-if let Some(_) = stmt.initializer() {
+if stmt.initializer().is_some() {
 let pat = stmt.pat()?;
 if let ast::Pat::BindPat(it) = pat {
 if it.name()?.text().as_str() == name {

@@ -85,8 +85,11 @@ impl FromStr for SsrQuery {
 fn from_str(query: &str) -> Result<SsrQuery, SsrError> {
 let mut it = query.split("==>>");
 let pattern = it.next().expect("at least empty string").trim();
-let mut template =
-it.next().ok_or(SsrError("Cannot find delemiter `==>>`".into()))?.trim().to_string();
+let mut template = it
+.next()
+.ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))?
+.trim()
+.to_string();
 if it.next().is_some() {
 return Err(SsrError("More than one delimiter found".into()));
 }

@@ -131,11 +134,12 @@ fn traverse(node: &SyntaxNode, go: &mut impl FnMut(&SyntaxNode) -> bool) {
 }

 fn split_by_var(s: &str) -> Result<(&str, &str, &str), SsrError> {
-let end_of_name = s.find(":").ok_or(SsrError("Use $<name>:expr".into()))?;
+let end_of_name = s.find(':').ok_or_else(|| SsrError("Use $<name>:expr".into()))?;
 let name = &s[0..end_of_name];
 is_name(name)?;
 let type_begin = end_of_name + 1;
-let type_length = s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or(s.len());
+let type_length =
+s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or_else(|| s.len());
 let type_name = &s[type_begin..type_begin + type_length];
 Ok((name, type_name, &s[type_begin + type_length..]))
 }

@@ -182,7 +186,7 @@ fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches {
 pattern.text() == code.text()
 }
 (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => {
-if placeholders.iter().find(|&n| n.0.as_str() == pattern.text()).is_some() {
+if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) {
 match_.binding.insert(Var(pattern.text().to_string()), code.clone());
 true
 } else {

@@ -11,17 +11,18 @@ doctest = false
 wasm = []

 [dependencies]
-either = "1.5"
+either = "1.5.3"
 format-buf = "1.0.0"
-indexmap = "1.3.0"
-itertools = "0.8.0"
+indexmap = "1.3.2"
+itertools = "0.8.2"
 join_to_string = "0.1.3"
-log = "0.4.5"
-rayon = "1.0.2"
-fst = { version = "0.3.1", default-features = false }
-rustc-hash = "1.0"
+log = "0.4.8"
+rayon = "1.3.0"
+fst = { version = "0.3.5", default-features = false }
+rustc-hash = "1.1.0"
+unicase = "2.6.0"
 superslice = "1.0.0"
-once_cell = "1.2.0"
+once_cell = "1.3.1"

 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }

@@ -36,4 +37,4 @@ test_utils = { path = "../test_utils" }
 hir = { path = "../ra_hir", package = "ra_hir" }

 [dev-dependencies]
-insta = "0.13.0"
+insta = "0.13.1"

@@ -44,7 +44,7 @@ impl fmt::Debug for AnalysisChange {
 if !self.libraries_added.is_empty() {
 d.field("libraries_added", &self.libraries_added.len());
 }
-if !self.crate_graph.is_none() {
+if self.crate_graph.is_some() {
 d.field("crate_graph", &self.crate_graph);
 }
 d.finish()

@@ -11,9 +11,9 @@ doctest = false
 ra_syntax = { path = "../ra_syntax" }
 ra_parser = { path = "../ra_parser" }
 tt = { path = "../ra_tt", package = "ra_tt" }
-rustc-hash = "1.0.0"
-smallvec = "1.0.0"
-log = "0.4.5"
+rustc-hash = "1.1.0"
+smallvec = "1.2.0"
+log = "0.4.8"

 [dev-dependencies]
 test_utils = { path = "../test_utils" }

@@ -101,7 +101,7 @@ fn match_subtree(
 tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
 tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
 ) if lhs == rhs => (),
-_ => Err(ExpandError::UnexpectedToken)?,
+_ => return Err(ExpandError::UnexpectedToken),
 }
 }
 Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {

@@ -45,15 +45,15 @@ impl PartialEq for Separator {
 }
 }

-pub(crate) fn parse_template<'a>(
-template: &'a tt::Subtree,
-) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> {
+pub(crate) fn parse_template(
+template: &tt::Subtree,
+) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
 parse_inner(template, Mode::Template)
 }

-pub(crate) fn parse_pattern<'a>(
-pattern: &'a tt::Subtree,
-) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> {
+pub(crate) fn parse_pattern(
+pattern: &tt::Subtree,
+) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
 parse_inner(pattern, Mode::Pattern)
 }

@@ -63,10 +63,7 @@ enum Mode {
 Template,
 }

-fn parse_inner<'a>(
-src: &'a tt::Subtree,
-mode: Mode,
-) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> {
+fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
 let mut src = TtIter::new(src);
 std::iter::from_fn(move || {
 let first = src.next()?;

@@ -100,7 +97,7 @@ fn next_op<'a>(
 Op::Repeat { subtree, separator, kind }
 }
 tt::TokenTree::Leaf(leaf) => match leaf {
-tt::Leaf::Punct(..) => Err(ExpandError::UnexpectedToken)?,
+tt::Leaf::Punct(..) => return Err(ExpandError::UnexpectedToken),
 tt::Leaf::Ident(ident) => {
 let name = &ident.text;
 let kind = eat_fragment_kind(src, mode)?;

@@ -147,15 +144,15 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp
 for tt in src {
 let tt = match tt {
 tt::TokenTree::Leaf(leaf) => leaf,
-tt::TokenTree::Subtree(_) => Err(ExpandError::InvalidRepeat)?,
+tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat),
 };
 let has_sep = match &separator {
-Separator::Puncts(puncts) => puncts.len() != 0,
+Separator::Puncts(puncts) => !puncts.is_empty(),
 _ => true,
 };
 match tt {
 tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
-Err(ExpandError::InvalidRepeat)?
+return Err(ExpandError::InvalidRepeat)
 }
 tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
 tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),

@@ -168,11 +165,11 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp
 match &mut separator {
 Separator::Puncts(puncts) => {
 if puncts.len() == 3 {
-Err(ExpandError::InvalidRepeat)?
+return Err(ExpandError::InvalidRepeat);
 }
 puncts.push(punct.clone())
 }
-_ => Err(ExpandError::InvalidRepeat)?,
+_ => return Err(ExpandError::InvalidRepeat),
 }
 continue;
 }

@@ -124,7 +124,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {

 let idx = closing as usize;
 let kind = kinds[idx];
-let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" };
+let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
 TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) }
 }


@@ -230,10 +230,8 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker {
 p.eat(T![async]);
 p.eat(T![move]);
 params::param_list_closure(p);
-if opt_fn_ret_type(p) {
-if !p.at(T!['{']) {
-p.error("expected `{`");
-}
+if opt_fn_ret_type(p) && !p.at(T!['{']) {
+p.error("expected `{`");
 }

 if p.at_ts(EXPR_FIRST) {

@@ -21,7 +21,7 @@ use super::*;
 // struct S;
 pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
 attributes::inner_attributes(p);
-while !p.at(EOF) && !(stop_on_r_curly && p.at(T!['}'])) {
+while !(stop_on_r_curly && p.at(T!['}']) || p.at(EOF)) {
 item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod)
 }
 }

@@ -94,7 +94,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) {

 fn opt_path_type_args(p: &mut Parser, mode: Mode) {
 match mode {
-Mode::Use => return,
+Mode::Use => {}
 Mode::Type => {
 // test path_fn_trait_args
 // type F = Box<Fn(i32) -> ()>;

@@ -126,13 +126,13 @@ impl<'t> Parser<'t> {
 }

 fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool {
-let t1 = self.token_source.lookahead_nth(n + 0);
+let t1 = self.token_source.lookahead_nth(n);
 let t2 = self.token_source.lookahead_nth(n + 1);
 t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2
 }

 fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool {
-let t1 = self.token_source.lookahead_nth(n + 0);
+let t1 = self.token_source.lookahead_nth(n);
 let t2 = self.token_source.lookahead_nth(n + 1);
 let t3 = self.token_source.lookahead_nth(n + 2);
 (t1.kind == k1 && t1.is_jointed_to_next)

@@ -9,13 +9,13 @@ publish = false
 doctest = false

 [dependencies]
-once_cell = "1.0.1"
-itertools = "0.8.0"
-backtrace = "0.3.28"
+once_cell = "1.3.1"
+itertools = "0.8.2"
+backtrace = "0.3.44"

 [target.'cfg(not(target_env = "msvc"))'.dependencies]
 jemallocator = { version = "0.3.2", optional = true }
-jemalloc-ctl = { version = "0.3.2", optional = true }
+jemalloc-ctl = { version = "0.3.3", optional = true }

 [features]
 jemalloc = [ "jemallocator", "jemalloc-ctl" ]

@@ -214,7 +214,7 @@ impl Drop for Profiler {
 let start = stack.starts.pop().unwrap();
 let duration = start.elapsed();
 let level = stack.starts.len();
-stack.messages.push(Message { level, duration, label: label });
+stack.messages.push(Message { level, duration, label });
 if level == 0 {
 let stdout = stderr();
 let longer_than = stack.filter_data.longer_than;

@@ -8,16 +8,16 @@ authors = ["rust-analyzer developers"]
 doctest = false

 [dependencies]
-log = "0.4.5"
-rustc-hash = "1.0"
+log = "0.4.8"
+rustc-hash = "1.1.0"

-cargo_metadata = "0.9.0"
+cargo_metadata = "0.9.1"

 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
 ra_cfg = { path = "../ra_cfg" }

-serde = { version = "1.0.89", features = ["derive"] }
-serde_json = "1.0.39"
+serde = { version = "1.0.104", features = ["derive"] }
+serde_json = "1.0.48"

 anyhow = "1.0.26"

@@ -164,7 +164,7 @@ impl CargoWorkspace {
 // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
 // https://github.com/oli-obk/cargo_metadata/issues/79
 meta.features(CargoOpt::NoDefaultFeatures);
-} else if cargo_features.features.len() > 0 {
+} else if !cargo_features.features.is_empty() {
 meta.features(CargoOpt::SomeFeatures(cargo_features.features.clone()));
 }
 if let Some(parent) = cargo_toml.parent() {

@@ -197,7 +197,7 @@ impl CargoWorkspace {
 let pkg_data = &mut packages[pkg];
 pkg_by_id.insert(id, pkg);
 for meta_tgt in meta_pkg.targets {
-let is_proc_macro = meta_tgt.kind.as_slice() == &["proc-macro"];
+let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
 let tgt = targets.alloc(TargetData {
 pkg,
 name: meta_tgt.name,

@@ -197,8 +197,9 @@ impl ProjectWorkspace {
 if let (Some(&from), Some(&to)) =
 (crates.get(&from_crate_id), crates.get(&to_crate_id))
 {
-if let Err(_) =
-crate_graph.add_dep(from, CrateName::new(&dep.name).unwrap(), to)
+if crate_graph
+.add_dep(from, CrateName::new(&dep.name).unwrap(), to)
+.is_err()
 {
 log::error!(
 "cyclic dependency {:?} -> {:?}",

@@ -237,8 +238,7 @@ impl ProjectWorkspace {
 if let (Some(&from), Some(&to)) =
 (sysroot_crates.get(&from), sysroot_crates.get(&to))
 {
-if let Err(_) =
-crate_graph.add_dep(from, CrateName::new(name).unwrap(), to)
+if crate_graph.add_dep(from, CrateName::new(name).unwrap(), to).is_err()
 {
 log::error!("cyclic dependency between sysroot crates")
 }

@@ -279,11 +279,14 @@ impl ProjectWorkspace {
 }
 if tgt.is_proc_macro(&cargo) {
 if let Some(proc_macro) = libproc_macro {
-if let Err(_) = crate_graph.add_dep(
-crate_id,
-CrateName::new("proc_macro").unwrap(),
-proc_macro,
-) {
+if crate_graph
+.add_dep(
+crate_id,
+CrateName::new("proc_macro").unwrap(),
+proc_macro,
+)
+.is_err()
+{
 log::error!(
 "cyclic dependency on proc_macro for {}",
 pkg.name(&cargo)

@@ -299,15 +302,19 @@ impl ProjectWorkspace {
 // Set deps to the core, std and to the lib target of the current package
 for &from in pkg_crates.get(&pkg).into_iter().flatten() {
 if let Some(to) = lib_tgt {
-if to != from {
-if let Err(_) = crate_graph.add_dep(
-from,
-// For root projects with dashes in their name,
-// cargo metadata does not do any normalization,
-// so we do it ourselves currently
-CrateName::normalize_dashes(pkg.name(&cargo)),
-to,
-) {
+if to != from
+&& crate_graph
+.add_dep(
+from,
+// For root projects with dashes in their name,
+// cargo metadata does not do any normalization,
+// so we do it ourselves currently
+CrateName::normalize_dashes(pkg.name(&cargo)),
+to,
+)
+.is_err()
+{
+{
 log::error!(
 "cyclic dependency between targets of {}",
 pkg.name(&cargo)

@@ -318,22 +325,25 @@ impl ProjectWorkspace {
 // core is added as a dependency before std in order to
 // mimic rustcs dependency order
 if let Some(core) = libcore {
-if let Err(_) =
-crate_graph.add_dep(from, CrateName::new("core").unwrap(), core)
+if crate_graph
+.add_dep(from, CrateName::new("core").unwrap(), core)
+.is_err()
 {
 log::error!("cyclic dependency on core for {}", pkg.name(&cargo))
 }
 }
 if let Some(alloc) = liballoc {
-if let Err(_) =
-crate_graph.add_dep(from, CrateName::new("alloc").unwrap(), alloc)
+if crate_graph
+.add_dep(from, CrateName::new("alloc").unwrap(), alloc)
+.is_err()
 {
 log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo))
 }
 }
 if let Some(std) = libstd {
-if let Err(_) =
-crate_graph.add_dep(from, CrateName::new("std").unwrap(), std)
+if crate_graph
+.add_dep(from, CrateName::new("std").unwrap(), std)
+.is_err()
 {
 log::error!("cyclic dependency on std for {}", pkg.name(&cargo))
 }

@@ -347,11 +357,10 @@ impl ProjectWorkspace {
 for dep in pkg.dependencies(&cargo) {
 if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
 for &from in pkg_crates.get(&pkg).into_iter().flatten() {
-if let Err(_) = crate_graph.add_dep(
-from,
-CrateName::new(&dep.name).unwrap(),
-to,
-) {
+if crate_graph
+.add_dep(from, CrateName::new(&dep.name).unwrap(), to)
+.is_err()
+{
 log::error!(
 "cyclic dependency {} -> {}",
 pkg.name(&cargo),

@@ -409,7 +418,7 @@ fn find_cargo_toml(path: &Path) -> Result<PathBuf> {
 }
 curr = path.parent();
 }
-Err(CargoTomlNotFoundError(path.to_path_buf()))?
+Err(CargoTomlNotFoundError(path.to_path_buf()).into())
 }

 pub fn get_rustc_cfg_options() -> CfgOptions {

@@ -11,12 +11,12 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 doctest = false

 [dependencies]
-itertools = "0.8.0"
+itertools = "0.8.2"
 rowan = "0.9.0"
 rustc_lexer = "0.1.0"
-rustc-hash = "1.0.1"
+rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
-once_cell = "1.2.0"
+once_cell = "1.3.1"

 ra_text_edit = { path = "../ra_text_edit" }
 ra_parser = { path = "../ra_parser" }

@@ -24,9 +24,9 @@ ra_parser = { path = "../ra_parser" }
 # This crate transitively depends on `smol_str` via `rowan`.
 # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
 # to reduce number of compilations
-smol_str = { version = "0.1.12", features = ["serde"] }
-serde = { version = "1", features = ["derive"] }
+smol_str = { version = "0.1.15", features = ["serde"] }
+serde = { version = "1.0.104", features = ["derive"] }

 [dev-dependencies]
 test_utils = { path = "../test_utils" }
-walkdir = "2.2.0"
+walkdir = "2.3.1"

@@ -95,16 +95,17 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
 lhs: SyntaxElement,
 rhs: SyntaxElement,
 ) {
-if lhs.kind() == rhs.kind() && lhs.text_range().len() == rhs.text_range().len() {
-if match (&lhs, &rhs) {
+if lhs.kind() == rhs.kind()
+&& lhs.text_range().len() == rhs.text_range().len()
+&& match (&lhs, &rhs) {
 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
 lhs.green() == rhs.green() || lhs.text() == rhs.text()
 }
 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
 _ => false,
-} {
-return;
 }
+{
+return;
 }
 if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) {
 if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() {

@@ -30,7 +30,7 @@ pub enum ElseBranch {

 impl ast::IfExpr {
 pub fn then_branch(&self) -> Option<ast::BlockExpr> {
-self.blocks().nth(0)
+self.blocks().next()
 }
 pub fn else_branch(&self) -> Option<ElseBranch> {
 let res = match self.blocks().nth(1) {

@@ -208,7 +208,7 @@ impl ast::BinExpr {
 }

 pub fn lhs(&self) -> Option<ast::Expr> {
-children(self).nth(0)
+children(self).next()
 }

 pub fn rhs(&self) -> Option<ast::Expr> {

@@ -271,7 +271,7 @@ impl ast::RangeExpr {

 impl ast::IndexExpr {
 pub fn base(&self) -> Option<ast::Expr> {
-children(self).nth(0)
+children(self).next()
 }
 pub fn index(&self) -> Option<ast::Expr> {
 children(self).nth(1)

@@ -287,7 +287,7 @@ impl ast::ArrayExpr {
 pub fn kind(&self) -> ArrayExprKind {
 if self.is_repeat() {
 ArrayExprKind::Repeat {
-initializer: children(self).nth(0),
+initializer: children(self).next(),
 repeat: children(self).nth(1),
 }
 } else {

@@ -328,10 +328,10 @@ impl ast::Literal {
 }

 pub fn kind(&self) -> LiteralKind {
-const INT_SUFFIXES: [&'static str; 12] = [
+const INT_SUFFIXES: [&str; 12] = [
 "u64", "u32", "u16", "u8", "usize", "isize", "i64", "i32", "i16", "i8", "u128", "i128",
 ];
-const FLOAT_SUFFIXES: [&'static str; 2] = ["f32", "f64"];
+const FLOAT_SUFFIXES: [&str; 2] = ["f32", "f64"];

 let token = self.token();


@@ -152,7 +152,7 @@ pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::Mat
 format!(" {}{}\n", arm.syntax(), comma)
 })
 .collect::<String>();
-return from_text(&format!("{}", arms_str));
+return from_text(&arms_str);

 fn from_text(text: &str) -> ast::MatchArmList {
 ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))

@@ -48,7 +48,7 @@ impl<'t> TokenSource for TextTokenSource<'t> {

 fn is_keyword(&self, kw: &str) -> bool {
 let pos = self.curr.1;
-if !(pos < self.tokens.len()) {
+if pos >= self.tokens.len() {
 return false;
 }
 let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);

@@ -9,7 +9,7 @@ publish = false
 doctest = false

 [dependencies]
-text_unit = "0.1.6"
+text_unit = "0.1.9"

 [dev-dependencies]
 test_utils = { path = "../test_utils" }

@@ -10,4 +10,4 @@ doctest = false
 [dependencies]
 # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
 # to reduce number of compilations
-smol_str = { version = "0.1.12", features = ["serde"] }
+smol_str = { version = "0.1.15", features = ["serde"] }

@@ -13,32 +13,32 @@ name = "rust-analyzer"
 path = "./src/bin/main.rs"

 [dependencies]
-anyhow = "1.0"
-crossbeam-channel = "0.4"
-either = "1.5"
+anyhow = "1.0.26"
+crossbeam-channel = "0.4.0"
+either = "1.5.3"
 env_logger = { version = "0.7.1", default-features = false }
 globset = "0.4.4"
-itertools = "0.8.0"
+itertools = "0.8.2"
 jod-thread = "0.1.0"
-log = "0.4.3"
-lsp-types = { version = "0.70.0", features = ["proposed"] }
+log = "0.4.8"
+lsp-types = { version = "0.70.1", features = ["proposed"] }
 parking_lot = "0.10.0"
-pico-args = "0.3.0"
-rand = { version = "0.7.0", features = ["small_rng"] }
+pico-args = "0.3.1"
+rand = { version = "0.7.3", features = ["small_rng"] }
 relative-path = "1.0.0"
-rustc-hash = "1.0"
-serde = { version = "1.0.83", features = ["derive"] }
-serde_json = "1.0.34"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.104", features = ["derive"] }
+serde_json = "1.0.48"
 threadpool = "1.7.1"

-lsp-server = "0.3.0"
+lsp-server = "0.3.1"
 ra_cargo_watch = { path = "../ra_cargo_watch" }
 ra_ide = { path = "../ra_ide" }
 ra_prof = { path = "../ra_prof" }
 ra_project_model = { path = "../ra_project_model" }
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
-ra_vfs = "0.5.0"
+ra_vfs = "0.5.2"

 # This should only be used in CLI
 ra_db = { path = "../ra_db" }

@@ -48,10 +48,10 @@ hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }


 [target.'cfg(windows)'.dependencies]
-winapi = "0.3"
+winapi = "0.3.8"

 [dev-dependencies]
-tempfile = "3"
+tempfile = "3.1.0"
 test_utils = { path = "../test_utils" }

 [features]

@@ -4,7 +4,7 @@
 mod args;

 use lsp_server::Connection;
-use ra_prof;
 use rust_analyzer::{cli, from_json, show_message, Result, ServerConfig};

 use crate::args::HelpPrinted;

@@ -130,7 +130,7 @@ pub fn analysis_stats(
 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
 }
 if verbosity.is_spammy() {
-bar.println(format!("{}", msg));
+bar.println(msg.to_string());
 }
 bar.set_message(&msg);
 let f_id = FunctionId::from(f);

@@ -206,17 +206,17 @@ pub fn main_loop(
 let event = select! {
 recv(&connection.receiver) -> msg => match msg {
 Ok(msg) => Event::Msg(msg),
-Err(RecvError) => Err("client exited without shutdown")?,
+Err(RecvError) => return Err("client exited without shutdown".into()),
 },
 recv(task_receiver) -> task => Event::Task(task.unwrap()),
 recv(world_state.task_receiver) -> task => match task {
 Ok(task) => Event::Vfs(task),
-Err(RecvError) => Err("vfs died")?,
+Err(RecvError) => return Err("vfs died".into()),
 },
 recv(libdata_receiver) -> data => Event::Lib(data.unwrap()),
 recv(world_state.check_watcher.task_recv) -> task => match task {
 Ok(task) => Event::CheckWatcher(task),
-Err(RecvError) => Err("check watcher died")?,
+Err(RecvError) => return Err("check watcher died".into()),
 }
 };
 if let Event::Msg(Message::Request(req)) = &event {

@@ -17,7 +17,7 @@ use test_utils::skip_slow_tests;

 use crate::support::{project, Project};

-const PROFILE: &'static str = "";
+const PROFILE: &str = "";
 // const PROFILE: &'static str = "*@3>100";

 #[test]

@@ -52,7 +52,7 @@ impl<'a> Project<'a> {
 let tmp_dir = self.tmp_dir.unwrap_or_else(|| TempDir::new().unwrap());
 static INIT: Once = Once::new();
 INIT.call_once(|| {
-let _ = env_logger::builder().is_test(true).try_init().unwrap();
+env_logger::builder().is_test(true).try_init().unwrap();
 ra_prof::set_filter(if crate::PROFILE.is_empty() {
 ra_prof::Filter::disabled()
 } else {

@@ -9,5 +9,5 @@ doctest = false

 [dependencies]
 difference = "2.0.0"
-text_unit = "0.1.2"
-serde_json = "1.0.34"
+text_unit = "0.1.9"
+serde_json = "1.0.48"

@@ -279,7 +279,7 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a
 return Some((expected, actual));
 }

-l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).nth(0)
+l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).next()
 }
 (&Null, &Null) => None,
 // magic string literal "{...}" acts as wildcard for any sub-JSON

@@ -9,8 +9,8 @@ publish = false
 doctest = false

 [dependencies]
-walkdir = "2.1.3"
-pico-args = "0.3.0"
+walkdir = "2.3.1"
+pico-args = "0.3.1"
 quote = "1.0.2"
-proc-macro2 = "1.0.1"
-anyhow = "1.0.19"
+proc-macro2 = "1.0.8"
+anyhow = "1.0.26"

@@ -94,8 +94,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
 })
 };

-let installed_extensions;
-if cfg!(unix) {
+let installed_extensions = if cfg!(unix) {
 run!("npm --version").context("`npm` is required to build the VS Code plugin")?;
 run!("npm install")?;

@@ -103,7 +102,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {

 let code = find_code(|bin| run!("{} --version", bin).is_ok())?;
 run!("{} --install-extension rust-analyzer.vsix --force", code)?;
-installed_extensions = run!("{} --list-extensions", code; echo = false)?;
+run!("{} --list-extensions", code; echo = false)?
 } else {
 run!("cmd.exe /c npm --version")
 .context("`npm` is required to build the VS Code plugin")?;

@@ -113,8 +112,8 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {

 let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?;
 run!(r"cmd.exe /c {}.cmd --install-extension rust-analyzer.vsix --force", code)?;
-installed_extensions = run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?;
-}
+run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?
+};

 if !installed_extensions.contains("rust-analyzer") {
 bail!(