Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 12:54:58 +00:00

commit 93bfc2d05d
Improve autocompletion by looking on the type and name
Signed-off-by: Benjamin Coenen <5719034+bnjjj@users.noreply.github.com>
288 changed files with 3919 additions and 5417 deletions
.github/workflows/ci.yaml (vendored, 17 lines changed)

@@ -20,25 +20,14 @@ jobs:
     name: Audit Rust vulnerabilities
     runs-on: ubuntu-latest
     steps:
-    - name: Install Rust toolchain
-      uses: actions-rs/toolchain@v1
-      with:
-        toolchain: stable
-        profile: minimal
-        override: true
-
     - name: Checkout repository
       uses: actions/checkout@v2
 
-    - run: sudo chown -R $(whoami):$(id -ng) ~/.cargo/
-
-    - name: Cache cargo
-      uses: actions/cache@v1
+    - uses: actions-rs/install@v0.1
       with:
-        path: ~/.cargo/
-        key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+        crate: cargo-audit
+        use-tool-cache: true
 
-    - run: cargo install cargo-audit
     - run: cargo audit
 
   rust:
Cargo.lock (generated, 76 lines changed)

@@ -424,6 +424,17 @@ dependencies = [
  "regex",
 ]
 
+[[package]]
+name = "goblin"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddd5e3132801a1ac34ac53b97acde50c4685414dd2f291b9ea52afa6f07468c8"
+dependencies = [
+ "log",
+ "plain",
+ "scroll",
+]
+
 [[package]]
 name = "heck"
 version = "0.3.1"

@@ -586,6 +597,15 @@ version = "0.2.68"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0"
 
+[[package]]
+name = "libloading"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c979a19ffb457f0273965c333053f3d586bf759bf7b683fbebc37f9a9ebedc4"
+dependencies = [
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "linked-hash-map"
 version = "0.5.2"

@@ -594,9 +614,9 @@ checksum = "ae91b68aebc4ddb91978b11a1b02ddd8602a05ec19002801c5666000e05e0f83"
 
 [[package]]
 name = "lock_api"
-version = "0.3.3"
+version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79b2de95ecb4691949fea4716ca53cdbcfccb2c612e19644a8bad05edcf9f47b"
+checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75"
 dependencies = [
  "scopeguard",
 ]

@@ -759,9 +779,9 @@ checksum = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
 
 [[package]]
 name = "parking_lot"
-version = "0.10.0"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc"
+checksum = "6fdfcb5f20930a79e326f7ec992a9fdb5b7bd809254b1e735bdd5a99f78bee0d"
 dependencies = [
  "lock_api",
  "parking_lot_core",

@@ -769,9 +789,9 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.7.0"
+version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1"
+checksum = "0e136c1904604defe99ce5fd71a28d473fa60a12255d511aa78a9ddf11237aeb"
 dependencies = [
  "cfg-if",
  "cloudabi",

@@ -825,6 +845,12 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3ad1f1b834a05d42dae330066e9699a173b28185b3bdc3dbf14ca239585de8cc"
 
+[[package]]
+name = "plain"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
+
 [[package]]
 name = "ppv-lite86"
 version = "0.2.6"

@@ -995,6 +1021,7 @@ dependencies = [
  "ra_prof",
  "ra_syntax",
  "rustc-hash",
+ "scoped-tls",
  "smallvec",
  "stdx",
  "test_utils",

@@ -1081,10 +1108,13 @@ version = "0.1.0"
 dependencies = [
  "cargo_metadata",
  "difference",
+ "goblin",
+ "libloading",
 "ra_mbe",
 "ra_proc_macro",
 "ra_tt",
 "serde_derive",
 "test_utils",
 ]

@@ -1390,12 +1420,38 @@ dependencies = [
  "winapi-util",
 ]
 
+[[package]]
+name = "scoped-tls"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+
 [[package]]
 name = "scopeguard"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
+[[package]]
+name = "scroll"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb2332cb595d33f7edd5700f4cbf94892e680c7f0ae56adab58a35190b66cb1"
+dependencies = [
+ "scroll_derive",
+]
+
+[[package]]
+name = "scroll_derive"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8584eea9b9ff42825b46faf46a8c24d2cff13ec152fa2a50df788b87c07ee28"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "semver"
 version = "0.9.0"

@@ -1474,9 +1530,9 @@ checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
 
 [[package]]
 name = "smallvec"
-version = "1.2.0"
+version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c2fb2ec9bcd216a5b0d0ccf31ab17b5ed1d627960edff65bbe95d3ce221cefc"
+checksum = "05720e22615919e4734f6a99ceae50d00226c3c5aca406e102ebc33298214e0a"
 
 [[package]]
 name = "smol_str"

@@ -1524,9 +1580,9 @@ dependencies = [
 
 [[package]]
 name = "termios"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625"
+checksum = "6f0fcee7b24a25675de40d5bb4de6e41b0df07bc9856295e7e2b3a3600c400c2"
 dependencies = [
  "libc",
 ]
@@ -180,7 +180,7 @@ trait Trait<T> {
 }
 
 impl Trait<u32> for () {
-    fn foo(&self) -> u32 { unimplemented!() }
+    fn foo(&self) -> u32 { todo!() }
 
 }
 "#####,

@@ -606,6 +606,21 @@ impl Walrus {
     )
 }
 
+#[test]
+fn doctest_reorder_fields() {
+    check(
+        "reorder_fields",
+        r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = <|>Foo {bar: 0, foo: 1}
+"#####,
+        r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = Foo {foo: 1, bar: 0}
+"#####,
+    )
+}
+
 #[test]
 fn doctest_replace_if_let_with_match() {
     check(
@@ -1,5 +1,5 @@
 use ra_syntax::{
-    ast::{self, AstNode, AstToken, NameOwner, TypeParamsOwner},
+    ast::{self, AstNode, NameOwner, TypeParamsOwner},
     TextUnit,
 };
 use stdx::{format_to, SepBy};

@@ -42,7 +42,7 @@ pub(crate) fn add_impl(ctx: AssistCtx) -> Option<Assist> {
     if let Some(type_params) = type_params {
         let lifetime_params = type_params
             .lifetime_params()
-            .filter_map(|it| it.lifetime())
+            .filter_map(|it| it.lifetime_token())
             .map(|it| it.text().clone());
         let type_params =
             type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
@@ -40,7 +40,7 @@ enum AddMissingImplMembersMode {
 // }
 //
 // impl Trait<u32> for () {
-//     fn foo(&self) -> u32 { unimplemented!() }
+//     fn foo(&self) -> u32 { todo!() }
 //
 // }
 // ```

@@ -165,7 +165,7 @@ fn add_missing_impl_members_inner(
 
 fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
     if fn_def.body().is_none() {
-        fn_def.with_body(make::block_from_expr(make::expr_unimplemented()))
+        fn_def.with_body(make::block_from_expr(make::expr_todo()))
     } else {
         fn_def
     }

@@ -215,8 +215,8 @@ impl Foo for S {
     fn bar(&self) {}
     <|>type Output;
     const CONST: usize = 42;
-    fn foo(&self) { unimplemented!() }
-    fn baz(&self) { unimplemented!() }
+    fn foo(&self) { todo!() }
+    fn baz(&self) { todo!() }
 
 }",
     );

@@ -250,7 +250,7 @@ struct S;
 
 impl Foo for S {
     fn bar(&self) {}
-    <|>fn foo(&self) { unimplemented!() }
+    <|>fn foo(&self) { todo!() }
 
 }",
     );

@@ -268,7 +268,7 @@ impl Foo for S { <|> }",
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-    <|>fn foo(&self) { unimplemented!() }
+    <|>fn foo(&self) { todo!() }
 }",
     );
 }

@@ -285,7 +285,7 @@ impl Foo<u32> for S { <|> }",
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
 impl Foo<u32> for S {
-    <|>fn foo(&self, t: u32) -> &u32 { unimplemented!() }
+    <|>fn foo(&self, t: u32) -> &u32 { todo!() }
 }",
     );
 }

@@ -302,7 +302,7 @@ impl<U> Foo<U> for S { <|> }",
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
 impl<U> Foo<U> for S {
-    <|>fn foo(&self, t: U) -> &U { unimplemented!() }
+    <|>fn foo(&self, t: U) -> &U { todo!() }
 }",
     );
 }

@@ -319,7 +319,7 @@ impl Foo for S {}<|>",
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-    <|>fn foo(&self) { unimplemented!() }
+    <|>fn foo(&self) { todo!() }
 }",
     )
 }

@@ -342,7 +342,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-    <|>fn foo(&self, bar: foo::Bar) { unimplemented!() }
+    <|>fn foo(&self, bar: foo::Bar) { todo!() }
 }",
     );
 }

@@ -365,7 +365,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-    <|>fn foo(&self, bar: foo::Bar<u32>) { unimplemented!() }
+    <|>fn foo(&self, bar: foo::Bar<u32>) { todo!() }
 }",
     );
 }

@@ -388,7 +388,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo<u32> for S {
-    <|>fn foo(&self, bar: foo::Bar<u32>) { unimplemented!() }
+    <|>fn foo(&self, bar: foo::Bar<u32>) { todo!() }
 }",
     );
 }

@@ -414,7 +414,7 @@ mod foo {
 struct Param;
 struct S;
 impl foo::Foo<Param> for S {
-    <|>fn foo(&self, bar: Param) { unimplemented!() }
+    <|>fn foo(&self, bar: Param) { todo!() }
 }",
     );
 }

@@ -439,7 +439,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-    <|>fn foo(&self, bar: foo::Bar<u32>::Assoc) { unimplemented!() }
+    <|>fn foo(&self, bar: foo::Bar<u32>::Assoc) { todo!() }
 }",
     );
 }

@@ -464,7 +464,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-    <|>fn foo(&self, bar: foo::Bar<foo::Baz>) { unimplemented!() }
+    <|>fn foo(&self, bar: foo::Bar<foo::Baz>) { todo!() }
 }",
     );
 }

@@ -487,7 +487,7 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-    <|>fn foo(&self, bar: dyn Fn(u32) -> i32) { unimplemented!() }
+    <|>fn foo(&self, bar: dyn Fn(u32) -> i32) { todo!() }
 }",
     );
 }

@@ -544,7 +544,7 @@ trait Foo {
 struct S;
 impl Foo for S {
     <|>type Output;
-    fn foo(&self) { unimplemented!() }
+    fn foo(&self) { todo!() }
 }"#,
     )
 }
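Across these hunks the generated method bodies switch from unimplemented!() to todo!(). Both macros panic when reached, but todo!() reads as "not yet written", which suits freshly generated stubs. A minimal sketch of what the generated code behaves like; the Greet trait and Stub type here are illustrative, not from this diff:

    trait Greet {
        fn greet(&self) -> String;
    }

    struct Stub;

    impl Greet for Stub {
        // The shape the assist now generates: a body that panics with
        // "not yet implemented" if it is ever called.
        fn greet(&self) -> String { todo!() }
    }

    fn main() {
        let s = Stub;
        // Calling `s.greet()` would panic at runtime, but the program
        // type-checks, which is the point of generating placeholder bodies.
        let _ = &s;
    }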
@@ -1,8 +1,7 @@
 use hir::Adt;
 use ra_syntax::{
     ast::{
-        self, AstNode, AstToken, NameOwner, StructKind, TypeAscriptionOwner, TypeParamsOwner,
-        VisibilityOwner,
+        self, AstNode, NameOwner, StructKind, TypeAscriptionOwner, TypeParamsOwner, VisibilityOwner,
     },
     TextUnit, T,
 };

@@ -106,7 +105,7 @@ fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
     if let Some(type_params) = type_params {
         let lifetime_params = type_params
             .lifetime_params()
-            .filter_map(|it| it.lifetime())
+            .filter_map(|it| it.lifetime_token())
             .map(|it| it.text().clone());
         let type_params =
             type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
@@ -29,7 +29,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
         ast::Pat::BindPat(pat) => pat,
         _ => return None,
     };
-    if bind_pat.is_mutable() {
+    if bind_pat.mut_token().is_some() {
         tested_by!(test_not_inline_mut_variable);
         return None;
     }
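Many files in this commit make the same mechanical change: boolean convenience methods on AST nodes (is_mutable(), has_semi(), is_mut(), is_auto(), and so on) give way to token accessors such as mut_token().is_some(). A small sketch of the accessor style, using hypothetical stand-in types rather than rust-analyzer's generated AST:

    struct Token {
        text: String,
    }

    struct BindPat {
        tokens: Vec<Token>,
    }

    impl BindPat {
        // Token-accessor style: expose the `mut` token itself, and let
        // callers decide what to do with it (presence check, text range, ...).
        fn mut_token(&self) -> Option<&Token> {
            self.tokens.iter().find(|t| t.text == "mut")
        }
    }

    fn is_mutable(pat: &BindPat) -> bool {
        // The old `is_mutable()` predicate reduces to a presence check,
        // so keeping only the accessor removes a redundant API surface.
        pat.mut_token().is_some()
    }

    fn main() {
        let pat = BindPat { tokens: vec![Token { text: "mut".to_string() }] };
        assert!(is_mutable(&pat));
    }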
@@ -61,7 +61,7 @@ pub(crate) fn introduce_variable(ctx: AssistCtx) -> Option<Assist> {
     };
     if is_full_stmt {
         tested_by!(test_introduce_var_expr_stmt);
-        if !full_stmt.unwrap().has_semi() {
+        if full_stmt.unwrap().semicolon_token().is_none() {
             buf.push_str(";");
         }
         edit.replace(expr.syntax().text_range(), buf);
@@ -3,7 +3,7 @@ use std::iter::successors;
 use ra_syntax::{
     algo::{neighbor, SyntaxRewriter},
     ast::{self, edit::AstNodeEdit, make},
-    AstNode, AstToken, Direction, InsertPosition, SyntaxElement, T,
+    AstNode, Direction, InsertPosition, SyntaxElement, T,
 };
 
 use crate::{Assist, AssistCtx, AssistId};

@@ -82,7 +82,7 @@ fn try_merge_trees(old: &ast::UseTree, new: &ast::UseTree) -> Option<ast::UseTre
             .filter(|it| it.kind() != T!['{'] && it.kind() != T!['}']),
     );
     let use_tree_list = lhs.use_tree_list()?;
-    let pos = InsertPosition::Before(use_tree_list.r_curly()?.syntax().clone().into());
+    let pos = InsertPosition::Before(use_tree_list.r_curly_token()?.into());
     let use_tree_list = use_tree_list.insert_children(pos, to_insert);
     Some(lhs.with_use_tree_list(use_tree_list))
 }
@@ -2,6 +2,7 @@ use ra_syntax::{
     ast::{self, edit::AstNodeEdit, make, AstNode, NameOwner, TypeBoundsOwner},
     match_ast,
     SyntaxKind::*,
+    T,
 };
 
 use crate::{Assist, AssistCtx, AssistId};

@@ -42,7 +43,7 @@ pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx) -> Option<Assist> {
             ast::EnumDef(it) => it.variant_list()?.syntax().clone().into(),
             ast::StructDef(it) => {
                 it.syntax().children_with_tokens()
-                    .find(|it| it.kind() == RECORD_FIELD_DEF_LIST || it.kind() == SEMI)?
+                    .find(|it| it.kind() == RECORD_FIELD_DEF_LIST || it.kind() == T![;])?
             },
             _ => return None
         }
crates/ra_assists/src/handlers/reorder_fields.rs (new file, 230 lines)

@@ -0,0 +1,230 @@
use std::collections::HashMap;

use itertools::Itertools;

use hir::{Adt, ModuleDef, PathResolution, Semantics, Struct};
use ra_ide_db::RootDatabase;
use ra_syntax::{
    algo, ast,
    ast::{Name, Path, RecordLit, RecordPat},
    AstNode, SyntaxKind, SyntaxNode,
};

use crate::{
    assist_ctx::{Assist, AssistCtx},
    AssistId,
};
use ra_syntax::ast::{Expr, NameRef};

// Assist: reorder_fields
//
// Reorder the fields of record literals and record patterns in the same order as in
// the definition.
//
// ```
// struct Foo {foo: i32, bar: i32};
// const test: Foo = <|>Foo {bar: 0, foo: 1}
// ```
// ->
// ```
// struct Foo {foo: i32, bar: i32};
// const test: Foo = Foo {foo: 1, bar: 0}
// ```
//
pub(crate) fn reorder_fields(ctx: AssistCtx) -> Option<Assist> {
    reorder::<RecordLit>(ctx.clone()).or_else(|| reorder::<RecordPat>(ctx))
}

fn reorder<R: AstNode>(ctx: AssistCtx) -> Option<Assist> {
    let record = ctx.find_node_at_offset::<R>()?;
    let path = record.syntax().children().find_map(Path::cast)?;

    let ranks = compute_fields_ranks(&path, &ctx)?;

    let fields = get_fields(&record.syntax());
    let sorted_fields = sorted_by_rank(&fields, |node| {
        *ranks.get(&get_field_name(node)).unwrap_or(&usize::max_value())
    });

    if sorted_fields == fields {
        return None;
    }

    ctx.add_assist(AssistId("reorder_fields"), "Reorder record fields", |edit| {
        for (old, new) in fields.iter().zip(&sorted_fields) {
            algo::diff(old, new).into_text_edit(edit.text_edit_builder());
        }
        edit.target(record.syntax().text_range())
    })
}

fn get_fields_kind(node: &SyntaxNode) -> Vec<SyntaxKind> {
    use SyntaxKind::*;
    match node.kind() {
        RECORD_LIT => vec![RECORD_FIELD],
        RECORD_PAT => vec![RECORD_FIELD_PAT, BIND_PAT],
        _ => vec![],
    }
}

fn get_field_name(node: &SyntaxNode) -> String {
    use SyntaxKind::*;
    match node.kind() {
        RECORD_FIELD => {
            if let Some(name) = node.children().find_map(NameRef::cast) {
                return name.to_string();
            }
            node.children().find_map(Expr::cast).map(|expr| expr.to_string()).unwrap_or_default()
        }
        BIND_PAT | RECORD_FIELD_PAT => {
            node.children().find_map(Name::cast).map(|n| n.to_string()).unwrap_or_default()
        }
        _ => String::new(),
    }
}

fn get_fields(record: &SyntaxNode) -> Vec<SyntaxNode> {
    let kinds = get_fields_kind(record);
    record.children().flat_map(|n| n.children()).filter(|n| kinds.contains(&n.kind())).collect()
}

fn sorted_by_rank(
    fields: &[SyntaxNode],
    get_rank: impl Fn(&SyntaxNode) -> usize,
) -> Vec<SyntaxNode> {
    fields.iter().cloned().sorted_by_key(get_rank).collect()
}

fn struct_definition(path: &ast::Path, sema: &Semantics<RootDatabase>) -> Option<Struct> {
    match sema.resolve_path(path) {
        Some(PathResolution::Def(ModuleDef::Adt(Adt::Struct(s)))) => Some(s),
        _ => None,
    }
}

fn compute_fields_ranks(path: &Path, ctx: &AssistCtx) -> Option<HashMap<String, usize>> {
    Some(
        struct_definition(path, ctx.sema)?
            .fields(ctx.db)
            .iter()
            .enumerate()
            .map(|(idx, field)| (field.name(ctx.db).to_string(), idx))
            .collect(),
    )
}

#[cfg(test)]
mod tests {
    use crate::helpers::{check_assist, check_assist_not_applicable};

    use super::*;

    #[test]
    fn not_applicable_if_sorted() {
        check_assist_not_applicable(
            reorder_fields,
            r#"
        struct Foo {
            foo: i32,
            bar: i32,
        }

        const test: Foo = <|>Foo { foo: 0, bar: 0 };
        "#,
        )
    }

    #[test]
    fn trivial_empty_fields() {
        check_assist_not_applicable(
            reorder_fields,
            r#"
        struct Foo {};
        const test: Foo = <|>Foo {}
        "#,
        )
    }

    #[test]
    fn reorder_struct_fields() {
        check_assist(
            reorder_fields,
            r#"
        struct Foo {foo: i32, bar: i32};
        const test: Foo = <|>Foo {bar: 0, foo: 1}
        "#,
            r#"
        struct Foo {foo: i32, bar: i32};
        const test: Foo = <|>Foo {foo: 1, bar: 0}
        "#,
        )
    }

    #[test]
    fn reorder_struct_pattern() {
        check_assist(
            reorder_fields,
            r#"
        struct Foo { foo: i64, bar: i64, baz: i64 }

        fn f(f: Foo) -> {
            match f {
                <|>Foo { baz: 0, ref mut bar, .. } => (),
                _ => ()
            }
        }
        "#,
            r#"
        struct Foo { foo: i64, bar: i64, baz: i64 }

        fn f(f: Foo) -> {
            match f {
                <|>Foo { ref mut bar, baz: 0, .. } => (),
                _ => ()
            }
        }
        "#,
        )
    }

    #[test]
    fn reorder_with_extra_field() {
        check_assist(
            reorder_fields,
            r#"
            struct Foo {
                foo: String,
                bar: String,
            }

            impl Foo {
                fn new() -> Foo {
                    let foo = String::new();
                    <|>Foo {
                        bar: foo.clone(),
                        extra: "Extra field",
                        foo,
                    }
                }
            }
            "#,
            r#"
            struct Foo {
                foo: String,
                bar: String,
            }

            impl Foo {
                fn new() -> Foo {
                    let foo = String::new();
                    <|>Foo {
                        foo,
                        bar: foo.clone(),
                        extra: "Extra field",
                    }
                }
            }
            "#,
        )
    }
}
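For reference, this is what the new assist does from the user's point of view. A before/after illustration in plain Rust, with names mirroring the doc comment above:

    struct Foo {
        foo: i32,
        bar: i32,
    }

    // Before invoking the assist on the literal:
    //     let v = Foo { bar: 0, foo: 1 };

    fn main() {
        // After "Reorder record fields": the fields follow declaration order.
        let v = Foo { foo: 1, bar: 0 };
        println!("{} {}", v.foo, v.bar);
    }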
@@ -129,6 +129,7 @@ mod handlers {
     mod replace_unwrap_with_match;
     mod split_import;
     mod add_from_impl_for_enum;
+    mod reorder_fields;
 
     pub(crate) fn all() -> &'static [AssistHandler] {
         &[

@@ -170,6 +171,7 @@ mod handlers {
             // These are manually sorted for better priorities
             add_missing_impl_members::add_missing_impl_members,
             add_missing_impl_members::add_missing_default_members,
+            reorder_fields::reorder_fields,
         ]
     }
 }
@@ -139,7 +139,7 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         field: &ast::FieldExpr,
-    ) -> Option<crate::StructField> {
+    ) -> Option<StructField> {
         let expr_id = self.expr_id(db, &field.clone().into())?;
         self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
     }

@@ -148,21 +148,19 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         field: &ast::RecordField,
-    ) -> Option<(crate::StructField, Option<Local>)> {
-        let (expr_id, local) = match field.expr() {
-            Some(it) => (self.expr_id(db, &it)?, None),
-            None => {
-                let src = InFile { file_id: self.file_id, value: field };
-                let expr_id = self.body_source_map.as_ref()?.field_init_shorthand_expr(src)?;
-                let local_name = field.name_ref()?.as_name();
+    ) -> Option<(StructField, Option<Local>)> {
+        let expr = field.expr()?;
+        let expr_id = self.expr_id(db, &expr)?;
+        let local = if field.name_ref().is_some() {
+            None
+        } else {
+            let local_name = field.field_name()?.as_name();
             let path = ModPath::from_segments(PathKind::Plain, once(local_name));
-                let local = match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+            match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
                 Some(ValueNs::LocalBinding(pat_id)) => {
                     Some(Local { pat_id, parent: self.resolver.body_owner()? })
                 }
                 _ => None,
-                };
-                (expr_id, local)
             }
         };
         let struct_field = self.infer.as_ref()?.record_field_resolution(expr_id)?;

@@ -255,7 +253,7 @@ impl SourceAnalyzer {
             _ => return None,
         };
 
-        let (variant, missing_fields) =
+        let (variant, missing_fields, _exhaustive) =
             record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
         let res = self.missing_fields(db, krate, substs, variant, missing_fields);
         Some(res)

@@ -319,8 +317,7 @@ fn scope_for_offset(
             if source.file_id != offset.file_id {
                 return None;
             }
-            let syntax_node_ptr =
-                source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+            let syntax_node_ptr = source.value.syntax_node_ptr();
             Some((syntax_node_ptr, scope))
         })
         // find containing scope

@@ -399,8 +396,7 @@ fn adjust(
             if source.file_id != file_id {
                 return None;
             }
-            let syntax_node_ptr =
-                source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+            let syntax_node_ptr = source.value.syntax_node_ptr();
             Some((syntax_node_ptr, scope))
         })
         .map(|(ptr, scope)| (ptr.range(), scope))
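The rewritten resolve_record_field leans on the fact that field-init shorthand is now lowered as an ordinary path expression rather than through a separate pointer kind. The shorthand it resolves, for reference:

    struct Point { x: i32, y: i32 }

    fn main() {
        let x = 1;
        let y = 2;
        // `Point { x, y }` is shorthand for `Point { x: x, y: y }`:
        // each field name doubles as a path expression naming a local,
        // so resolving the field yields both the struct field and the local.
        let p = Point { x, y };
        assert_eq!((p.x, p.y), (1, 2));
    }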
@@ -4,7 +4,6 @@ use std::sync::Arc;
 
 use either::Either;
 use hir_expand::{
-    hygiene::Hygiene,
     name::{AsName, Name},
     InFile,
 };

@@ -13,7 +12,7 @@ use ra_prof::profile;
 use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 
 use crate::{
-    attr::Attrs, db::DefDatabase, src::HasChildSource, src::HasSource, trace::Trace,
+    body::CfgExpander, db::DefDatabase, src::HasChildSource, src::HasSource, trace::Trace,
     type_ref::TypeRef, visibility::RawVisibility, EnumId, HasModule, LocalEnumVariantId,
     LocalStructFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
 };

@@ -125,8 +124,9 @@ fn lower_enum(
 
 impl VariantData {
     fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self {
+        let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate);
         let mut trace = Trace::new_for_arena();
-        match lower_struct(db, &mut trace, &flavor, module_id) {
+        match lower_struct(db, &mut expander, &mut trace, &flavor) {
             StructKind::Tuple => VariantData::Tuple(trace.into_arena()),
             StructKind::Record => VariantData::Record(trace.into_arena()),
             StructKind::Unit => VariantData::Unit,

@@ -178,8 +178,9 @@ impl HasChildSource for VariantId {
                 it.lookup(db).container.module(db),
             ),
         };
+        let mut expander = CfgExpander::new(db, src.file_id, module_id.krate);
         let mut trace = Trace::new_for_map();
-        lower_struct(db, &mut trace, &src, module_id);
+        lower_struct(db, &mut expander, &mut trace, &src);
         src.with_value(trace.into_map())
     }
 }

@@ -193,16 +194,15 @@ pub enum StructKind {
 
 fn lower_struct(
     db: &dyn DefDatabase,
+    expander: &mut CfgExpander,
     trace: &mut Trace<StructFieldData, Either<ast::TupleFieldDef, ast::RecordFieldDef>>,
     ast: &InFile<ast::StructKind>,
-    module_id: ModuleId,
 ) -> StructKind {
-    let crate_graph = db.crate_graph();
     match &ast.value {
         ast::StructKind::Tuple(fl) => {
             for (i, fd) in fl.fields().enumerate() {
-                let attrs = Attrs::new(&fd, &Hygiene::new(db.upcast(), ast.file_id));
-                if !attrs.is_cfg_enabled(&crate_graph[module_id.krate].cfg_options) {
+                let attrs = expander.parse_attrs(&fd);
+                if !expander.is_cfg_enabled(&attrs) {
                     continue;
                 }

@@ -219,8 +219,8 @@ fn lower_struct(
         }
         ast::StructKind::Record(fl) => {
             for fd in fl.fields() {
-                let attrs = Attrs::new(&fd, &Hygiene::new(db.upcast(), ast.file_id));
-                if !attrs.is_cfg_enabled(&crate_graph[module_id.krate].cfg_options) {
+                let attrs = expander.parse_attrs(&fd);
+                if !expander.is_cfg_enabled(&attrs) {
                     continue;
                 }
@@ -93,6 +93,7 @@ impl Attrs {
     }
 
     pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
+        // FIXME: handle cfg_attr :-)
         self.by_key("cfg").tt_values().all(|tt| cfg_options.is_cfg_enabled(tt) != Some(false))
     }
 }
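is_cfg_enabled keeps an item unless some #[cfg(...)] predicate definitely evaluates to false: the comparison `!= Some(false)` means an unknown atom (which yields None) stays enabled. A small self-contained illustration of the surface behavior this implements; #[cfg(any())] is used here only because it is always false:

    // Ordinary cfg gating, as evaluated during lowering: a field whose
    // cfg predicate is false simply does not exist in the lowered item.
    struct Config {
        host: String,
        #[cfg(any())] // `any()` with no arguments is always false: field disabled
        never_present: bool,
    }

    fn main() {
        // The literal only needs the enabled fields.
        let c = Config { host: "localhost".to_string() };
        println!("{}", c.host);
    }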
@@ -9,11 +9,14 @@ use drop_bomb::DropBomb;
 use either::Either;
 use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
 use ra_arena::{map::ArenaMap, Arena};
+use ra_cfg::CfgOptions;
+use ra_db::CrateId;
 use ra_prof::profile;
 use ra_syntax::{ast, AstNode, AstPtr};
 use rustc_hash::FxHashMap;
 
 use crate::{
+    attr::Attrs,
     db::DefDatabase,
     expr::{Expr, ExprId, Pat, PatId},
     item_scope::BuiltinShadowMode,

@@ -24,25 +27,59 @@ use crate::{
     AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
 };
 
+/// A subset of Expander that only deals with cfg attributes. We only need it to
+/// avoid cyclic queries in crate def map during enum processing.
+pub(crate) struct CfgExpander {
+    cfg_options: CfgOptions,
+    hygiene: Hygiene,
+}
+
 pub(crate) struct Expander {
+    cfg_expander: CfgExpander,
     crate_def_map: Arc<CrateDefMap>,
     current_file_id: HirFileId,
-    hygiene: Hygiene,
     ast_id_map: Arc<AstIdMap>,
     module: ModuleId,
     recursive_limit: usize,
 }
 
+impl CfgExpander {
+    pub(crate) fn new(
+        db: &dyn DefDatabase,
+        current_file_id: HirFileId,
+        krate: CrateId,
+    ) -> CfgExpander {
+        let hygiene = Hygiene::new(db.upcast(), current_file_id);
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
+        CfgExpander { cfg_options, hygiene }
+    }
+
+    pub(crate) fn parse_attrs(&self, owner: &dyn ast::AttrsOwner) -> Attrs {
+        Attrs::new(owner, &self.hygiene)
+    }
+
+    pub(crate) fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
+        attrs.is_cfg_enabled(&self.cfg_options)
+    }
+}
+
 impl Expander {
     pub(crate) fn new(
         db: &dyn DefDatabase,
         current_file_id: HirFileId,
         module: ModuleId,
     ) -> Expander {
+        let cfg_expander = CfgExpander::new(db, current_file_id, module.krate);
         let crate_def_map = db.crate_def_map(module.krate);
-        let hygiene = Hygiene::new(db.upcast(), current_file_id);
         let ast_id_map = db.ast_id_map(current_file_id);
-        Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module, recursive_limit: 0 }
+        Expander {
+            cfg_expander,
+            crate_def_map,
+            current_file_id,
+            ast_id_map,
+            module,
+            recursive_limit: 0,
+        }
     }
 
     pub(crate) fn enter_expand<T: ast::AstNode>(

@@ -75,7 +112,7 @@ impl Expander {
             ast_id_map: mem::take(&mut self.ast_id_map),
             bomb: DropBomb::new("expansion mark dropped"),
         };
-        self.hygiene = Hygiene::new(db.upcast(), file_id);
+        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id);
         self.current_file_id = file_id;
         self.ast_id_map = db.ast_id_map(file_id);
         self.recursive_limit += 1;

@@ -91,7 +128,7 @@ impl Expander {
     }
 
     pub(crate) fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
-        self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+        self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id);
         self.current_file_id = mark.file_id;
         self.ast_id_map = mem::take(&mut mark.ast_id_map);
         self.recursive_limit -= 1;

@@ -102,8 +139,16 @@ impl Expander {
         InFile { file_id: self.current_file_id, value }
     }
 
+    pub(crate) fn parse_attrs(&self, owner: &dyn ast::AttrsOwner) -> Attrs {
+        self.cfg_expander.parse_attrs(owner)
+    }
+
+    pub(crate) fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
+        self.cfg_expander.is_cfg_enabled(attrs)
+    }
+
     fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
-        Path::from_src(path, &self.hygiene)
+        Path::from_src(path, &self.cfg_expander.hygiene)
     }
 
     fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<MacroDefId> {

@@ -142,7 +187,7 @@ pub struct Body {
     pub item_scope: ItemScope,
 }
 
-pub type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
+pub type ExprPtr = AstPtr<ast::Expr>;
 pub type ExprSource = InFile<ExprPtr>;
 
 pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;

@@ -236,11 +281,11 @@ impl Index<PatId> for Body {
 
 impl BodySourceMap {
     pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
-        self.expr_map_back[expr]
+        self.expr_map_back[expr].clone()
     }
 
     pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprId> {
-        let src = node.map(|it| Either::Left(AstPtr::new(it)));
+        let src = node.map(|it| AstPtr::new(it));
         self.expr_map.get(&src).cloned()
     }
 

@@ -249,13 +294,8 @@ impl BodySourceMap {
         self.expansions.get(&src).cloned()
     }
 
-    pub fn field_init_shorthand_expr(&self, node: InFile<&ast::RecordField>) -> Option<ExprId> {
-        let src = node.map(|it| Either::Right(AstPtr::new(it)));
-        self.expr_map.get(&src).cloned()
-    }
-
     pub fn pat_syntax(&self, pat: PatId) -> Result<PatSource, SyntheticSyntax> {
-        self.pat_map_back[pat]
+        self.pat_map_back[pat].clone()
     }
 
     pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {

@@ -264,6 +304,6 @@ impl BodySourceMap {
     }
 
     pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
-        self.field_map[&(expr, field)]
+        self.field_map[&(expr, field)].clone()
     }
 }
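Two independent changes run through this file: ExprPtr loses its Either wrapper now that field-init shorthand lowers to a plain expression, and several accessors gain .clone() because the pointer type is no longer Copy. The Clone-versus-Copy distinction driving the second change, in miniature, with illustrative types rather than rust-analyzer's own:

    #[derive(Clone)] // Clone, but deliberately not Copy
    struct NodePtr {
        offsets: Vec<u32>, // an owning payload rules out Copy
    }

    struct Map {
        back: Vec<NodePtr>,
    }

    impl Map {
        fn get(&self, i: usize) -> NodePtr {
            // With Copy, `self.back[i]` alone would compile; without it,
            // returning by value needs an explicit clone, exactly like the
            // `.clone()` calls added to BodySourceMap above.
            self.back[i].clone()
        }
    }

    fn main() {
        let m = Map { back: vec![NodePtr { offsets: vec![0, 4, 9] }] };
        println!("{:?}", m.get(0).offsets);
    }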
@@ -2,9 +2,7 @@
 //! representation.
 
 use either::Either;
-
 use hir_expand::{
-    hygiene::Hygiene,
     name::{name, AsName, Name},
     MacroDefId, MacroDefKind,
 };

@@ -18,10 +16,8 @@ use ra_syntax::{
 };
 use test_utils::tested_by;
 
-use super::{ExprSource, PatSource};
 use crate::{
     adt::StructKind,
-    attr::Attrs,
     body::{Body, BodySourceMap, Expander, PatPtr, SyntheticSyntax},
     builtin_type::{BuiltinFloat, BuiltinInt},
     db::DefDatabase,

@@ -31,12 +27,13 @@ use crate::{
     },
     item_scope::BuiltinShadowMode,
     path::GenericArgs,
     path::Path,
     type_ref::{Mutability, TypeRef},
-    AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, HasModule, Intern,
-    ModuleDefId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+    AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId,
+    StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
 };
 
+use super::{ExprSource, PatSource};
+
 pub(super) fn lower(
     db: &dyn DefDatabase,
     def: DefWithBodyId,

@@ -104,9 +101,8 @@ impl ExprCollector<'_> {
     }
 
     fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr<ast::Expr>) -> ExprId {
-        let ptr = Either::Left(ptr);
         let src = self.expander.to_source(ptr);
-        let id = self.make_expr(expr, Ok(src));
+        let id = self.make_expr(expr, Ok(src.clone()));
         self.source_map.expr_map.insert(src, id);
         id
     }

@@ -115,13 +111,6 @@ impl ExprCollector<'_> {
     fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
         self.make_expr(expr, Err(SyntheticSyntax))
     }
-    fn alloc_expr_field_shorthand(&mut self, expr: Expr, ptr: AstPtr<ast::RecordField>) -> ExprId {
-        let ptr = Either::Right(ptr);
-        let src = self.expander.to_source(ptr);
-        let id = self.make_expr(expr, Ok(src));
-        self.source_map.expr_map.insert(src, id);
-        id
-    }
     fn empty_block(&mut self) -> ExprId {
         self.alloc_expr_desugared(Expr::Block { statements: Vec::new(), tail: None })
     }

@@ -136,7 +125,7 @@ impl ExprCollector<'_> {
 
     fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
         let src = self.expander.to_source(ptr);
-        let id = self.make_pat(pat, Ok(src));
+        let id = self.make_pat(pat, Ok(src.clone()));
         self.source_map.pat_map.insert(src, id);
         id
     }

@@ -291,7 +280,7 @@ impl ExprCollector<'_> {
             ast::Expr::ParenExpr(e) => {
                 let inner = self.collect_expr_opt(e.expr());
                 // make the paren expr point to the inner expression as well
-                let src = self.expander.to_source(Either::Left(syntax_ptr));
+                let src = self.expander.to_source(syntax_ptr);
                 self.source_map.expr_map.insert(src, inner);
                 inner
             }

@@ -300,7 +289,6 @@ impl ExprCollector<'_> {
                 self.alloc_expr(Expr::Return { expr }, syntax_ptr)
             }
             ast::Expr::RecordLit(e) => {
-                let crate_graph = self.db.crate_graph();
                 let path = e.path().and_then(|path| self.expander.parse_path(path));
                 let mut field_ptrs = Vec::new();
                 let record_lit = if let Some(nfl) = e.record_field_list() {

@@ -308,31 +296,17 @@ impl ExprCollector<'_> {
                         .fields()
                         .inspect(|field| field_ptrs.push(AstPtr::new(field)))
                         .filter_map(|field| {
-                            let module_id = ContainerId::DefWithBodyId(self.def).module(self.db);
-                            let attrs = Attrs::new(
-                                &field,
-                                &Hygiene::new(self.db.upcast(), self.expander.current_file_id),
-                            );
-
-                            if !attrs.is_cfg_enabled(&crate_graph[module_id.krate].cfg_options) {
+                            let attrs = self.expander.parse_attrs(&field);
+                            if !self.expander.is_cfg_enabled(&attrs) {
                                 return None;
                             }
+                            let name = field.field_name()?.as_name();
 
                             Some(RecordLitField {
-                                name: field
-                                    .name_ref()
-                                    .map(|nr| nr.as_name())
-                                    .unwrap_or_else(Name::missing),
-                                expr: if let Some(e) = field.expr() {
-                                    self.collect_expr(e)
-                                } else if let Some(nr) = field.name_ref() {
-                                    // field shorthand
-                                    self.alloc_expr_field_shorthand(
-                                        Expr::Path(Path::from_name_ref(&nr)),
-                                        AstPtr::new(&field),
-                                    )
-                                } else {
-                                    self.missing_expr()
+                                name,
+                                expr: match field.expr() {
+                                    Some(e) => self.collect_expr(e),
+                                    None => self.missing_expr(),
                                },
                             })
                         })

@@ -372,7 +346,7 @@ impl ExprCollector<'_> {
             }
             ast::Expr::RefExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
-                let mutability = Mutability::from_mutable(e.is_mut());
+                let mutability = Mutability::from_mutable(e.mut_token().is_some());
                 self.alloc_expr(Expr::Ref { expr, mutability }, syntax_ptr)
             }
             ast::Expr::PrefixExpr(e) => {

@@ -587,7 +561,8 @@ impl ExprCollector<'_> {
         let pattern = match &pat {
             ast::Pat::BindPat(bp) => {
                 let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
-                let annotation = BindingAnnotation::new(bp.is_mutable(), bp.is_ref());
+                let annotation =
+                    BindingAnnotation::new(bp.mut_token().is_some(), bp.ref_token().is_some());
                 let subpat = bp.pat().map(|subpat| self.collect_pat(subpat));
                 if annotation == BindingAnnotation::Unannotated && subpat.is_none() {
                     // This could also be a single-segment path pattern. To

@@ -628,7 +603,7 @@ impl ExprCollector<'_> {
             }
             ast::Pat::RefPat(p) => {
                 let pat = self.collect_pat_opt(p.pat());
-                let mutability = Mutability::from_mutable(p.is_mut());
+                let mutability = Mutability::from_mutable(p.mut_token().is_some());
                 Pat::Ref { pat, mutability }
             }
             ast::Pat::PathPat(p) => {

@@ -667,7 +642,9 @@ impl ExprCollector<'_> {
                 });
                 fields.extend(iter);
 
-                Pat::Record { path, args: fields }
+                let ellipsis = record_field_pat_list.dotdot_token().is_some();
+
+                Pat::Record { path, args: fields, ellipsis }
             }
             ast::Pat::SlicePat(p) => {
                 let SlicePatComponents { prefix, slice, suffix } = p.components();

@@ -688,7 +665,6 @@ impl ExprCollector<'_> {
                 Pat::Missing
             }
-
             // FIXME: implement
             ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing,
         };
@@ -20,7 +20,7 @@ use crate::{
     type_ref::{Mutability, TypeBound, TypeRef},
     visibility::RawVisibility,
     AssocContainerId, AssocItemId, ConstId, ConstLoc, Expander, FunctionId, FunctionLoc, HasModule,
-    ImplId, Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+    ImplId, Intern, Lookup, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]

@@ -74,7 +74,7 @@ impl FunctionData {
             TypeRef::unit()
         };
 
-        let ret_type = if src.value.is_async() {
+        let ret_type = if src.value.async_token().is_some() {
             let future_impl = desugar_future_path(ret_type);
             let ty_bound = TypeBound::Path(future_impl);
             TypeRef::ImplTrait(vec![ty_bound])

@@ -135,7 +135,7 @@ impl TraitData {
     pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
         let src = tr.lookup(db).source(db);
         let name = src.value.name().map_or_else(Name::missing, |n| n.as_name());
-        let auto = src.value.is_auto();
+        let auto = src.value.auto_token().is_some();
         let ast_id_map = db.ast_id_map(src.file_id);
 
         let container = AssocContainerId::TraitId(tr);

@@ -212,16 +212,23 @@ impl ImplData {
 
         let target_trait = src.value.target_trait().map(TypeRef::from_ast);
         let target_type = TypeRef::from_ast_opt(src.value.target_type());
-        let is_negative = src.value.is_negative();
+        let is_negative = src.value.excl_token().is_some();
         let module_id = impl_loc.container.module(db);
 
         let mut items = Vec::new();
 
         if let Some(item_list) = src.value.item_list() {
-            items.extend(collect_impl_items(db, item_list.impl_items(), src.file_id, id));
+            let mut expander = Expander::new(db, impl_loc.ast_id.file_id, module_id);
+            items.extend(collect_impl_items(
+                db,
+                &mut expander,
+                item_list.impl_items(),
+                src.file_id,
+                id,
+            ));
             items.extend(collect_impl_items_in_macros(
                 db,
-                module_id,
+                &mut expander,
                 &src.with_value(item_list),
                 id,
             ));

@@ -268,18 +275,17 @@ impl ConstData {
 
 fn collect_impl_items_in_macros(
     db: &dyn DefDatabase,
-    module_id: ModuleId,
+    expander: &mut Expander,
     impl_def: &InFile<ast::ItemList>,
     id: ImplId,
 ) -> Vec<AssocItemId> {
-    let mut expander = Expander::new(db, impl_def.file_id, module_id);
     let mut res = Vec::new();
 
     // We set a limit to protect against infinite recursion
     let limit = 100;
 
     for m in impl_def.value.syntax().children().filter_map(ast::MacroCall::cast) {
-        res.extend(collect_impl_items_in_macro(db, &mut expander, m, id, limit))
+        res.extend(collect_impl_items_in_macro(db, expander, m, id, limit))
     }
 
     res

@@ -300,6 +306,7 @@ fn collect_impl_items_in_macro(
     let items: InFile<ast::MacroItems> = expander.to_source(items);
     let mut res = collect_impl_items(
         db,
+        expander,
         items.value.items().filter_map(|it| ImplItem::cast(it.syntax().clone())),
         items.file_id,
         id,

@@ -319,33 +326,27 @@ fn collect_impl_items_in_macro(
 
 fn collect_impl_items(
     db: &dyn DefDatabase,
+    expander: &mut Expander,
     impl_items: impl Iterator<Item = ImplItem>,
     file_id: crate::HirFileId,
     id: ImplId,
 ) -> Vec<AssocItemId> {
     let items = db.ast_id_map(file_id);
-    let crate_graph = db.crate_graph();
-    let module_id = id.lookup(db).container.module(db);
 
     impl_items
         .filter_map(|item_node| match item_node {
             ast::ImplItem::FnDef(it) => {
+                let attrs = expander.parse_attrs(&it);
+                if !expander.is_cfg_enabled(&attrs) {
+                    return None;
+                }
                 let def = FunctionLoc {
                     container: AssocContainerId::ImplId(id),
                     ast_id: AstId::new(file_id, items.ast_id(&it)),
                 }
                 .intern(db);
-
-                if !db
-                    .function_data(def)
-                    .attrs
-                    .is_cfg_enabled(&crate_graph[module_id.krate].cfg_options)
-                {
-                    None
-                } else {
-                    Some(def.into())
-                }
+                Some(def.into())
             }
             ast::ImplItem::ConstDef(it) => {
                 let def = ConstLoc {
                     container: AssocContainerId::ImplId(id),
@@ -20,7 +20,7 @@ impl Diagnostic for UnresolvedModule {
         "unresolved module".to_string()
     }
     fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.decl.into() }
+        InFile { file_id: self.file, value: self.decl.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
@@ -376,35 +376,14 @@ pub enum Pat {
     Wild,
     Tuple(Vec<PatId>),
     Or(Vec<PatId>),
-    Record {
-        path: Option<Path>,
-        args: Vec<RecordFieldPat>,
-        // FIXME: 'ellipsis' option
-    },
-    Range {
-        start: ExprId,
-        end: ExprId,
-    },
-    Slice {
-        prefix: Vec<PatId>,
-        slice: Option<PatId>,
-        suffix: Vec<PatId>,
-    },
+    Record { path: Option<Path>, args: Vec<RecordFieldPat>, ellipsis: bool },
+    Range { start: ExprId, end: ExprId },
+    Slice { prefix: Vec<PatId>, slice: Option<PatId>, suffix: Vec<PatId> },
     Path(Path),
     Lit(ExprId),
-    Bind {
-        mode: BindingAnnotation,
-        name: Name,
-        subpat: Option<PatId>,
-    },
-    TupleStruct {
-        path: Option<Path>,
-        args: Vec<PatId>,
-    },
-    Ref {
-        pat: PatId,
-        mutability: Mutability,
-    },
+    Bind { mode: BindingAnnotation, name: Name, subpat: Option<PatId> },
+    TupleStruct { path: Option<Path>, args: Vec<PatId> },
+    Ref { pat: PatId, mutability: Mutability },
 }
 
 impl Pat {
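Pat::Record gains an ellipsis: bool, resolving the old FIXME. The `..` syntax the new flag records looks like this in source, mirroring the test fixtures elsewhere in this commit:

    struct Foo {
        foo: i64,
        bar: i64,
        baz: i64,
    }

    fn main() {
        let f = Foo { foo: 1, bar: 2, baz: 0 };
        match f {
            // `..` marks the pattern as deliberately non-exhaustive over
            // fields; lowering now stores this as `ellipsis: true`, so
            // missing-field diagnostics know not to fire here.
            Foo { baz: 0, ref bar, .. } => println!("bar = {}", bar),
            _ => println!("other"),
        }
    }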
@@ -194,7 +194,7 @@ impl GenericParams {
     }
 
     fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
-        if bound.has_question_mark() {
+        if bound.question_token().is_some() {
             // FIXME: remove this bound
             return;
         }
@@ -4,6 +4,7 @@
 //! features, such as Fn family of traits.
 use std::sync::Arc;
 
+use ra_prof::profile;
 use ra_syntax::SmolStr;
 use rustc_hash::FxHashMap;
 

@@ -78,6 +79,8 @@ impl LangItems {
 
     /// Salsa query. This will look for lang items in a specific crate.
     pub(crate) fn crate_lang_items_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<LangItems> {
+        let _p = profile("crate_lang_items_query");
+
         let mut lang_items = LangItems::default();
 
         let crate_def_map = db.crate_def_map(krate);

@@ -95,6 +98,7 @@ impl LangItems {
         db: &dyn DefDatabase,
         module: ModuleId,
     ) -> Option<Arc<LangItems>> {
+        let _p = profile("module_lang_items_query");
         let mut lang_items = LangItems::default();
         lang_items.collect_lang_items(db, module);
         if lang_items.items.is_empty() {

@@ -111,6 +115,7 @@ impl LangItems {
         start_crate: CrateId,
         item: SmolStr,
     ) -> Option<LangItemTarget> {
+        let _p = profile("lang_item_query");
         let lang_items = db.crate_lang_items(start_crate);
         let start_crate_target = lang_items.items.get(&item);
         if let Some(target) = start_crate_target {
@@ -462,6 +462,14 @@ impl DefCollector<'_> {
             Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
                 tested_by!(glob_enum);
                 // glob import from enum => just import all the variants
+
+                // XXX: urgh, so this works by accident! Here, we look at
+                // the enum data, and, in theory, this might require us to
+                // look back at the crate_def_map, creating a cycle. For
+                // example, `enum E { crate::some_macro!(); }`. Luckily, the
+                // only kind of macro that is allowed inside enum is a
+                // `cfg_macro`, and we don't need to run name resolution for
+                // it, but this is sheer luck!
                 let enum_data = self.db.enum_data(e);
                 let resolutions = enum_data
                     .variants

@@ -977,11 +985,7 @@ impl ModCollector<'_, '_> {
     }
 
     fn is_cfg_enabled(&self, attrs: &Attrs) -> bool {
-        // FIXME: handle cfg_attr :-)
-        attrs
-            .by_key("cfg")
-            .tt_values()
-            .all(|tt| self.def_collector.cfg_options.is_cfg_enabled(tt) != Some(false))
+        attrs.is_cfg_enabled(self.def_collector.cfg_options)
     }
 }
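The new comment explains why glob imports from enums are safe to resolve eagerly during collection. The feature itself, in minimal form; note that on edition 2018 the fixture's `use E::*;` must be spelled `use self::E::*;`:

    enum E {
        A,
        B,
    }

    // A glob import pulls every variant into scope, which is why the
    // collector has to consult the enum's variant list while the crate's
    // def map is still being built.
    use self::E::*;

    fn main() {
        let e = A; // refers to E::A through the glob import
        match e {
            A => println!("variant A"),
            B => println!("variant B"),
        }
    }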
@@ -287,7 +287,7 @@ impl RawItemsCollector {
         let visibility = RawVisibility::from_ast_with_hygiene(module.visibility(), &self.hygiene);
 
         let ast_id = self.source_ast_id_map.ast_id(&module);
-        if module.has_semi() {
+        if module.semicolon_token().is_some() {
             let item =
                 self.raw_items.modules.alloc(ModuleData::Declaration { name, visibility, ast_id });
             self.push_item(current_module, attrs, RawItemKind::Module(item));
@@ -32,6 +32,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() {
 
         use crate::foo::bar::Baz;
 
+        enum E { A, B }
+        use E::*;
+
         fn foo() -> i32 {
             1 + 1
         }

@@ -46,6 +49,9 @@ fn typing_inside_a_function_should_not_invalidate_def_map() {
 
         use crate::foo::bar::Baz;
 
+        enum E { A, B }
+        use E::*;
+
         fn foo() -> i32 { 92 }
         ",
     );
@@ -134,11 +134,6 @@ impl Path {
         lower::lower_path(path, hygiene)
     }
 
-    /// Converts an `ast::NameRef` into a single-identifier `Path`.
-    pub(crate) fn from_name_ref(name_ref: &ast::NameRef) -> Path {
-        Path { type_anchor: None, mod_path: name_ref.as_name().into(), generic_args: vec![None] }
-    }
-
     /// Converts a known mod path to `Path`.
     pub(crate) fn from_known_path(
         path: ModPath,
@@ -28,7 +28,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
     loop {
         let segment = path.segment()?;
 
-        if segment.coloncolon().is_some() {
+        if segment.coloncolon_token().is_some() {
             kind = PathKind::Abs;
         }
 
@@ -34,7 +34,7 @@ pub(crate) fn lower_use_tree(
     let alias = tree.alias().map(|a| {
         a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
     });
-    let is_glob = tree.star().is_some();
+    let is_glob = tree.star_token().is_some();
     if let Some(ast_path) = tree.path() {
         // Handle self in a path.
         // E.g. `use something::{self, <...>}`
@@ -77,7 +77,7 @@ impl TypeRef {
             }
             ast::TypeRef::PointerType(inner) => {
                 let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
-                let mutability = Mutability::from_mutable(inner.is_mut());
+                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::RawPtr(Box::new(inner_ty), mutability)
             }
             ast::TypeRef::ArrayType(inner) => {

@@ -88,7 +88,7 @@ impl TypeRef {
             }
             ast::TypeRef::ReferenceType(inner) => {
                 let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
-                let mutability = Mutability::from_mutable(inner.is_mut());
+                let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::Reference(Box::new(inner_ty), mutability)
             }
             ast::TypeRef::PlaceholderType(_inner) => TypeRef::Placeholder,
@@ -90,7 +90,7 @@ impl AstIdMap {
     }
 
     pub(crate) fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
-        self.arena[id.raw].cast::<N>().unwrap()
+        self.arena[id.raw].clone().cast::<N>().unwrap()
     }
 
     fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
@@ -232,7 +232,7 @@ mod tests {
         let quoted = quote!(#a);
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{:?}", quoted);
-        assert_eq!(t, "Subtree { delimiter: None, token_trees: [Leaf(Ident(Ident { text: \"hello\", id: TokenId(4294967295) }))] }");
+        assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
     }
 
     #[test]
@@ -24,6 +24,8 @@ ra_prof = { path = "../ra_prof" }
 ra_syntax = { path = "../ra_syntax" }
 test_utils = { path = "../test_utils" }
 
+scoped-tls = "1"
+
 chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
 chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
 chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
@@ -11,7 +11,7 @@ use ra_db::{impl_intern_key, salsa, CrateId, Upcast};
 use ra_prof::profile;
 
 use crate::{
-    method_resolution::CrateImplDefs,
+    method_resolution::{CrateImplDefs, TyFingerprint},
     traits::{chalk, AssocTyValue, Impl},
     Binders, CallableDef, GenericPredicate, InferenceResult, PolyFnSig, Substs, TraitRef, Ty,
     TyDefId, TypeCtor, ValueTyDefId,

@@ -65,7 +65,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplDefs>;
 
     #[salsa::invoke(crate::traits::impls_for_trait_query)]
-    fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;
+    fn impls_for_trait(
+        &self,
+        krate: CrateId,
+        trait_: TraitId,
+        self_ty_fp: Option<TyFingerprint>,
+    ) -> Arc<[ImplId]>;
 
     // Interned IDs for Chalk integration
     #[salsa::interned]
@@ -21,7 +21,7 @@ impl Diagnostic for NoSuchField {
     }
 
     fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.field.into() }
+        InFile { file_id: self.file, value: self.field.clone().into() }
     }
 
     fn as_any(&self) -> &(dyn Any + Send + 'static) {

@@ -45,7 +45,7 @@ impl Diagnostic for MissingFields {
         buf
     }
     fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.field_list.into() }
+        InFile { file_id: self.file, value: self.field_list.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self

@@ -62,6 +62,29 @@ impl AstDiagnostic for MissingFields {
     }
 }
 
+#[derive(Debug)]
+pub struct MissingPatFields {
+    pub file: HirFileId,
+    pub field_list: AstPtr<ast::RecordFieldPatList>,
+    pub missed_fields: Vec<Name>,
+}
+
+impl Diagnostic for MissingPatFields {
+    fn message(&self) -> String {
+        let mut buf = String::from("Missing structure fields:\n");
+        for field in &self.missed_fields {
+            format_to!(buf, "- {}", field);
+        }
+        buf
+    }
+    fn source(&self) -> InFile<SyntaxNodePtr> {
+        InFile { file_id: self.file, value: self.field_list.clone().into() }
+    }
+    fn as_any(&self) -> &(dyn Any + Send + 'static) {
+        self
+    }
+}
+
 #[derive(Debug)]
 pub struct MissingMatchArms {
     pub file: HirFileId,

@@ -74,7 +97,7 @@ impl Diagnostic for MissingMatchArms {
         String::from("Missing match arm")
     }
     fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.match_expr.into() }
+        InFile { file_id: self.file, value: self.match_expr.clone().into() }
     }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self

@@ -92,7 +115,7 @@ impl Diagnostic for MissingOkInTailExpr {
         "wrap return expression in Ok".to_string()
     }
     fn source(&self) -> InFile<SyntaxNodePtr> {
-        InFile { file_id: self.file, value: self.expr.into() }
+        InFile { file_id: self.file, value: self.expr.clone().into() }
    }
     fn as_any(&self) -> &(dyn Any + Send + 'static) {
         self
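MissingPatFields is the pattern-side counterpart of MissingFields: it reports a record pattern that names only some of the struct's fields without using `..`. What triggers it versus what silences it, in plain Rust:

    struct Foo {
        foo: i64,
        bar: i64,
        baz: i64,
    }

    fn main() {
        let f = Foo { foo: 1, bar: 2, baz: 3 };
        match f {
            // Fine: `..` covers the unnamed fields, so no diagnostic
            // (the new `ellipsis` flag makes this distinction possible).
            // Writing `Foo { foo }` without `..` is what MissingPatFields
            // would report for `bar` and `baz`.
            Foo { foo, .. } => println!("foo = {}", foo),
        }
    }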
@ -247,9 +247,8 @@ impl HirDisplay for ApplicationTy {
            }
        }
        TypeCtor::Closure { .. } => {
            let sig = self.parameters[0]
                .callable_sig(f.db)
                .expect("first closure parameter should contain signature");
            let sig = self.parameters[0].callable_sig(f.db);
            if let Some(sig) = sig {
                if sig.params().is_empty() {
                    write!(f, "||")?;
                } else if f.omit_verbose_types() {

@ -260,6 +259,9 @@ impl HirDisplay for ApplicationTy {
                    write!(f, "|")?;
                };
                write!(f, " -> {}", sig.ret().display(f.db))?;
            } else {
                write!(f, "{{closure}}")?;
            }
        }
    }
    Ok(())
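The change replaces an `.expect(..)` panic with a graceful fallback when a closure's signature cannot be recovered. A condensed, std-only version of that rendering logic (the `Sig` type is a toy stand-in):

use std::fmt::Write;

struct Sig {
    params: Vec<String>,
    ret: String,
}

fn render_closure(sig: Option<Sig>) -> String {
    let mut f = String::new();
    if let Some(sig) = sig {
        if sig.params.is_empty() {
            write!(f, "||").unwrap();
        } else {
            write!(f, "|{}|", sig.params.join(", ")).unwrap();
        }
        write!(f, " -> {}", sig.ret).unwrap();
    } else {
        // No signature available: print a placeholder instead of panicking.
        write!(f, "{{closure}}").unwrap();
    }
    f
}

fn main() {
    assert_eq!(render_closure(None), "{closure}");
    assert_eq!(
        render_closure(Some(Sig { params: vec!["i32".into()], ret: "bool".into() })),
        "|i32| -> bool"
    );
}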
@ -9,7 +9,7 @@ use rustc_hash::FxHashSet;

use crate::{
    db::HirDatabase,
    diagnostics::{MissingFields, MissingMatchArms, MissingOkInTailExpr},
    diagnostics::{MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields},
    utils::variant_data,
    ApplicationTy, InferenceResult, Ty, TypeCtor,
    _match::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},

@ -49,13 +49,48 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
            if let Some((variant_def, missed_fields, true)) =
                record_literal_missing_fields(db, &self.infer, id, expr)
            {
                self.create_record_literal_missing_fields_diagnostic(
                    id,
                    db,
                    variant_def,
                    missed_fields,
                );
            }
            if let Expr::Match { expr, arms } = expr {
                self.validate_match(id, *expr, arms, db, self.infer.clone());
            }
        }
        for (id, pat) in body.pats.iter() {
            if let Some((variant_def, missed_fields, true)) =
                record_pattern_missing_fields(db, &self.infer, id, pat)
            {
                self.create_record_pattern_missing_fields_diagnostic(
                    id,
                    db,
                    variant_def,
                    missed_fields,
                );
            }
        }
        let body_expr = &body[body.body_expr];
        if let Expr::Block { tail: Some(t), .. } = body_expr {
            self.validate_results_in_tail_expr(body.body_expr, *t, db);
        }
    }

    fn create_record_literal_missing_fields_diagnostic(
        &mut self,
        id: ExprId,
        db: &dyn HirDatabase,
        variant_def: VariantId,
        missed_fields: Vec<LocalStructFieldId>,
    ) {
        // XXX: only look at source_map if we do have missing fields
        let (_, source_map) = db.body_with_source_map(self.func.into());

        if let Ok(source_ptr) = source_map.expr_syntax(id) {
            if let Some(expr) = source_ptr.value.left() {
                let root = source_ptr.file_syntax(db.upcast());
                if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
            if let ast::Expr::RecordLit(record_lit) = &source_ptr.value.to_node(&root) {
                if let Some(field_list) = record_lit.record_field_list() {
                    let variant_data = variant_data(db.upcast(), variant_def);
                    let missed_fields = missed_fields

@ -71,14 +106,35 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
                }
            }
        }
    }
        if let Expr::Match { expr, arms } = expr {
            self.validate_match(id, *expr, arms, db, self.infer.clone());

    fn create_record_pattern_missing_fields_diagnostic(
        &mut self,
        id: PatId,
        db: &dyn HirDatabase,
        variant_def: VariantId,
        missed_fields: Vec<LocalStructFieldId>,
    ) {
        // XXX: only look at source_map if we do have missing fields
        let (_, source_map) = db.body_with_source_map(self.func.into());

        if let Ok(source_ptr) = source_map.pat_syntax(id) {
            if let Some(expr) = source_ptr.value.as_ref().left() {
                let root = source_ptr.file_syntax(db.upcast());
                if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
                    if let Some(field_list) = record_pat.record_field_pat_list() {
                        let variant_data = variant_data(db.upcast(), variant_def);
                        let missed_fields = missed_fields
                            .into_iter()
                            .map(|idx| variant_data.fields()[idx].name.clone())
                            .collect();
                        self.sink.push(MissingPatFields {
                            file: source_ptr.file_id,
                            field_list: AstPtr::new(&field_list),
                            missed_fields,
                        })
                    }
                }
            }
        let body_expr = &body[body.body_expr];
        if let Expr::Block { tail: Some(t), .. } = body_expr {
            self.validate_results_in_tail_expr(body.body_expr, *t, db);
        }
    }

@ -147,9 +203,8 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
        }

        if let Ok(source_ptr) = source_map.expr_syntax(id) {
            if let Some(expr) = source_ptr.value.left() {
                let root = source_ptr.file_syntax(db.upcast());
                if let ast::Expr::MatchExpr(match_expr) = expr.to_node(&root) {
            if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
                if let (Some(match_expr), Some(arms)) =
                    (match_expr.expr(), match_expr.match_arm_list())
                {

@ -162,7 +217,6 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
            }
        }
    }
}

    fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
        // the mismatch will be on the whole block currently

@ -189,9 +243,8 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
        let (_, source_map) = db.body_with_source_map(self.func.into());

        if let Ok(source_ptr) = source_map.expr_syntax(id) {
            if let Some(expr) = source_ptr.value.left() {
                self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
            }
            self.sink
                .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value });
        }
    }
}

@ -232,9 +285,9 @@ pub fn record_pattern_missing_fields(
    infer: &InferenceResult,
    id: PatId,
    pat: &Pat,
) -> Option<(VariantId, Vec<LocalStructFieldId>)> {
    let fields = match pat {
        Pat::Record { path: _, args } => args,
) -> Option<(VariantId, Vec<LocalStructFieldId>, /*exhaustive*/ bool)> {
    let (fields, exhaustive) = match pat {
        Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
        _ => return None,
    };

@ -254,5 +307,5 @@ pub fn record_pattern_missing_fields(
    if missed_fields.is_empty() {
        return None;
    }
    Some((variant_def, missed_fields))
    Some((variant_def, missed_fields, exhaustive))
}
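The new third tuple element is the key behavioral change: missing fields are only reported when the pattern has no `..` rest pattern. A std-only sketch of that contract (`Pat` and `missing_fields` here are toy stand-ins for the HIR types):

#[allow(dead_code)]
enum Pat {
    Record { args: Vec<String>, ellipsis: bool },
    Other,
}

fn missing_fields(all: &[&str], pat: &Pat) -> Option<(Vec<String>, /*exhaustive*/ bool)> {
    let (args, exhaustive) = match pat {
        Pat::Record { args, ellipsis } => (args, !ellipsis),
        _ => return None,
    };
    let missed: Vec<String> = all
        .iter()
        .filter(|f| !args.iter().any(|a| a.as_str() == **f))
        .map(|f| f.to_string())
        .collect();
    if missed.is_empty() {
        None
    } else {
        Some((missed, exhaustive))
    }
}

fn main() {
    let pat = Pat::Record { args: vec!["foo".into()], ellipsis: true };
    // With `..` present the caller sees `exhaustive == false` and emits no diagnostic.
    assert_eq!(missing_fields(&["foo", "bar"], &pat), Some((vec!["bar".into()], false)));
}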
@ -158,7 +158,7 @@ impl<'a> InferenceContext<'a> {
            Pat::TupleStruct { path: p, args: subpats } => {
                self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
            }
            Pat::Record { path: p, args: fields } => {
            Pat::Record { path: p, args: fields, ellipsis: _ } => {
                self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)
            }
            Pat::Path(path) => {
@ -34,7 +34,7 @@ impl TyFingerprint {
    /// Creates a TyFingerprint for looking up an impl. Only certain types can
    /// have impls: if we have some `struct S`, we can have an `impl S`, but not
    /// `impl &S`. Hence, this will return `None` for reference types and such.
    fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
    pub(crate) fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
        match ty {
            Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)),
            _ => None,

@ -45,7 +45,7 @@ impl TyFingerprint {
#[derive(Debug, PartialEq, Eq)]
pub struct CrateImplDefs {
    impls: FxHashMap<TyFingerprint, Vec<ImplId>>,
    impls_by_trait: FxHashMap<TraitId, Vec<ImplId>>,
    impls_by_trait: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
}

impl CrateImplDefs {

@ -59,7 +59,14 @@ impl CrateImplDefs {
        for impl_id in module_data.scope.impls() {
            match db.impl_trait(impl_id) {
                Some(tr) => {
                    res.impls_by_trait.entry(tr.value.trait_).or_default().push(impl_id);
                    let self_ty = db.impl_self_ty(impl_id);
                    let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
                    res.impls_by_trait
                        .entry(tr.value.trait_)
                        .or_default()
                        .entry(self_ty_fp)
                        .or_default()
                        .push(impl_id);
                }
                None => {
                    let self_ty = db.impl_self_ty(impl_id);

@ -79,11 +86,39 @@ impl CrateImplDefs {
    }

    pub fn lookup_impl_defs_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ {
        self.impls_by_trait.get(&tr).into_iter().flatten().copied()
        self.impls_by_trait
            .get(&tr)
            .into_iter()
            .flat_map(|m| m.values().flat_map(|v| v.iter().copied()))
    }

    pub fn lookup_impl_defs_for_trait_and_ty(
        &self,
        tr: TraitId,
        fp: TyFingerprint,
    ) -> impl Iterator<Item = ImplId> + '_ {
        self.impls_by_trait
            .get(&tr)
            .and_then(|m| m.get(&Some(fp)))
            .into_iter()
            .flatten()
            .copied()
            .chain(
                self.impls_by_trait
                    .get(&tr)
                    .and_then(|m| m.get(&None))
                    .into_iter()
                    .flatten()
                    .copied(),
            )
    }

    pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a {
        self.impls.values().chain(self.impls_by_trait.values()).flatten().copied()
        self.impls
            .values()
            .chain(self.impls_by_trait.values().flat_map(|m| m.values()))
            .flatten()
            .copied()
    }
}
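The two-level map is the heart of this change: impls are bucketed by trait, then by `Option<TyFingerprint>`, and a fingerprint-specific lookup also drains the `None` bucket, which holds impls whose self type has no fingerprint (references and the like). A sketch of the same idea with std's HashMap in place of FxHashMap:

use std::collections::HashMap;

type TraitId = u32;
type ImplId = u32;
type Fingerprint = u32;

#[derive(Default)]
struct ImplMap {
    by_trait: HashMap<TraitId, HashMap<Option<Fingerprint>, Vec<ImplId>>>,
}

impl ImplMap {
    fn insert(&mut self, tr: TraitId, fp: Option<Fingerprint>, impl_: ImplId) {
        self.by_trait.entry(tr).or_default().entry(fp).or_default().push(impl_);
    }

    fn lookup(&self, tr: TraitId, fp: Fingerprint) -> impl Iterator<Item = ImplId> + '_ {
        let buckets = self.by_trait.get(&tr);
        // Exact fingerprint matches first, then the fingerprint-less bucket.
        let exact = buckets.and_then(|m| m.get(&Some(fp))).into_iter().flatten().copied();
        let rest = buckets.and_then(|m| m.get(&None)).into_iter().flatten().copied();
        exact.chain(rest)
    }
}

fn main() {
    let mut map = ImplMap::default();
    map.insert(1, Some(7), 100); // e.g. impl Trait for S
    map.insert(1, None, 200);    // e.g. impl Trait for &S
    map.insert(1, Some(9), 300); // impl Trait for U -- skipped by the lookup below
    assert_eq!(map.lookup(1, 7).collect::<Vec<_>>(), vec![100, 200]);
}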
@ -23,7 +23,7 @@ use insta::assert_snapshot;
use ra_db::{fixture::WithFixture, salsa::Database, FilePosition, SourceDatabase};
use ra_syntax::{
    algo,
    ast::{self, AstNode, AstToken},
    ast::{self, AstNode},
};
use stdx::format_to;

@ -82,12 +82,10 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {

    for (expr, ty) in inference_result.type_of_expr.iter() {
        let syntax_ptr = match body_source_map.expr_syntax(expr) {
            Ok(sp) => {
                sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()))
            }
            Ok(sp) => sp.map(|ast| ast.syntax_node_ptr()),
            Err(SyntheticSyntax) => continue,
        };
        types.push((syntax_ptr, ty));
        types.push((syntax_ptr.clone(), ty));
        if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
            mismatches.push((syntax_ptr, mismatch));
        }

@ -101,7 +99,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
        let node = src_ptr.value.to_node(&src_ptr.file_syntax(&db));

        let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
            (self_param.self_kw().unwrap().syntax().text_range(), "self".to_string())
            (self_param.self_token().unwrap().text_range(), "self".to_string())
        } else {
            (src_ptr.value.range(), node.text().to_string().replace("\n", " "))
        };

@ -409,3 +407,43 @@ fn no_such_field_with_feature_flag_diagnostics_on_struct_fields() {

    assert_snapshot!(diagnostics, @r###""###);
}

#[test]
fn missing_record_pat_field_diagnostic() {
    let diagnostics = TestDB::with_files(
        r"
        //- /lib.rs
        struct S { foo: i32, bar: () }
        fn baz(s: S) {
            let S { foo: _ } = s;
        }
        ",
    )
    .diagnostics()
    .0;

    assert_snapshot!(diagnostics, @r###"
    "{ foo: _ }": Missing structure fields:
    - bar
    "###
    );
}

#[test]
fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
    let diagnostics = TestDB::with_files(
        r"
        //- /lib.rs
        struct S { foo: i32, bar: () }
        fn baz(s: S) -> i32 {
            match s {
                S { foo, .. } => foo,
            }
        }
        ",
    )
    .diagnostics()
    .0;

    assert_snapshot!(diagnostics, @"");
}
@ -1,10 +1,13 @@
use std::fs;

use insta::assert_snapshot;
use ra_db::fixture::WithFixture;

use super::{infer, type_at, type_at_pos};
use test_utils::project_dir;

use crate::test_db::TestDB;

use super::{infer, type_at, type_at_pos};

#[test]
fn cfg_impl_def() {
    let (db, pos) = TestDB::with_position(

@ -481,6 +484,30 @@ fn bar() -> u32 {0}
    assert_eq!("u32", type_at_pos(&db, pos));
}

#[test]
#[ignore]
fn include_accidentally_quadratic() {
    let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
    let big_file = fs::read_to_string(file).unwrap();
    let big_file = vec![big_file; 10].join("\n");

    let fixture = r#"
//- /main.rs
#[rustc_builtin_macro]
macro_rules! include {() => {}}

include!("foo.rs");

fn main() {
    RegisterBlock { }<|>;
}
"#;
    let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file);

    let (db, pos) = TestDB::with_position(&fixture);
    assert_eq!("RegisterBlock", type_at_pos(&db, pos));
}

#[test]
fn infer_builtin_macros_include_concat() {
    let (db, pos) = TestDB::with_position(
@ -7,7 +7,7 @@ use ra_db::{impl_intern_key, salsa, CrateId};
use ra_prof::profile;
use rustc_hash::FxHashSet;

use crate::{db::HirDatabase, DebruijnIndex};
use crate::{db::HirDatabase, method_resolution::TyFingerprint, DebruijnIndex};

use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};

@ -40,7 +40,12 @@ pub(crate) fn impls_for_trait_query(
    db: &dyn HirDatabase,
    krate: CrateId,
    trait_: TraitId,
    self_ty_fp: Option<TyFingerprint>,
) -> Arc<[ImplId]> {
    // FIXME: We could be a lot smarter here - because of the orphan rules and
    // the fact that the trait and the self type need to be in the dependency
    // tree of a crate somewhere for an impl to exist, we could skip looking in
    // a lot of crates completely
    let mut impls = FxHashSet::default();
    // We call the query recursively here. On the one hand, this means we can
    // reuse results from queries for different crates; on the other hand, this

@ -48,10 +53,13 @@ pub(crate) fn impls_for_trait_query(
    // ones the user is editing), so this may actually be a waste of memory. I'm
    // doing it like this mainly for simplicity for now.
    for dep in &db.crate_graph()[krate].dependencies {
        impls.extend(db.impls_for_trait(dep.crate_id, trait_).iter());
        impls.extend(db.impls_for_trait(dep.crate_id, trait_, self_ty_fp).iter());
    }
    let crate_impl_defs = db.impls_in_crate(krate);
    impls.extend(crate_impl_defs.lookup_impl_defs_for_trait(trait_));
    match self_ty_fp {
        Some(fp) => impls.extend(crate_impl_defs.lookup_impl_defs_for_trait_and_ty(trait_, fp)),
        None => impls.extend(crate_impl_defs.lookup_impl_defs_for_trait(trait_)),
    }
    impls.into_iter().collect()
}

@ -177,7 +185,7 @@ fn solve(

    let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);

    let solution = solver.solve_limited(&context, goal, || {
    let should_continue = || {
        context.db.check_canceled();
        let remaining = fuel.get();
        fuel.set(remaining - 1);

@ -185,12 +193,21 @@ fn solve(
            log::debug!("fuel exhausted");
        }
        remaining > 0
    });
    };
    let mut solve = || solver.solve_limited(&context, goal, should_continue);
    // don't set the TLS for Chalk unless Chalk debugging is active, to make
    // extra sure we only use it for debugging
    let solution =
        if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() };

    log::debug!("solve({:?}) => {:?}", goal, solution);
    solution
}

fn is_chalk_debug() -> bool {
    std::env::var("CHALK_DEBUG").is_ok()
}

fn solution_from_chalk(
    db: &dyn HirDatabase,
    solution: chalk_solve::Solution<Interner>,
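The fuel pattern factored out above is simple enough to show standalone: a `Cell` counter shared by reference with a `should_continue` closure, so the solver can be cut off after a fixed number of steps without threading state through it. A std-only sketch (the loop stands in for `solver.solve_limited`):

use std::cell::Cell;

fn run_limited(fuel_budget: u32) -> u32 {
    let fuel = Cell::new(fuel_budget);
    let should_continue = || {
        let remaining = fuel.get();
        fuel.set(remaining.saturating_sub(1));
        remaining > 0
    };
    // Stand-in for the solver: do work only while fuel remains.
    let mut steps = 0;
    while should_continue() {
        steps += 1;
    }
    steps
}

fn main() {
    assert_eq!(run_limited(100), 100);
}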
@ -16,10 +16,12 @@ use ra_db::{

use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
use crate::{
    db::HirDatabase, display::HirDisplay, utils::generics, ApplicationTy, GenericPredicate,
    ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
    db::HirDatabase, display::HirDisplay, method_resolution::TyFingerprint, utils::generics,
    ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
};

pub(super) mod tls;

#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub struct Interner;

@ -33,90 +35,85 @@ impl chalk_ir::interner::Interner for Interner {
    type Identifier = TypeAliasId;
    type DefId = InternId;

    // FIXME: implement these
    fn debug_struct_id(
        _type_kind_id: chalk_ir::StructId<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        type_kind_id: StructId,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
    }

    fn debug_trait_id(
        _type_kind_id: chalk_ir::TraitId<Self>,
        _fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
    fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
    }

    fn debug_assoc_type_id(
        _id: chalk_ir::AssocTypeId<Self>,
        _fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
    fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
    }

    fn debug_alias(
        _projection: &chalk_ir::AliasTy<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        alias: &chalk_ir::AliasTy<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
    }

    fn debug_ty(_ty: &chalk_ir::Ty<Self>, _fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        None
    fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
    }

    fn debug_lifetime(
        _lifetime: &chalk_ir::Lifetime<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        lifetime: &chalk_ir::Lifetime<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt)))
    }

    fn debug_parameter(
        _parameter: &Parameter<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        parameter: &Parameter<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_parameter(parameter, fmt)))
    }

    fn debug_goal(_goal: &Goal<Self>, _fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        None
    fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
        tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt)))
    }

    fn debug_goals(
        _goals: &chalk_ir::Goals<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        goals: &chalk_ir::Goals<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt)))
    }

    fn debug_program_clause_implication(
        _pci: &chalk_ir::ProgramClauseImplication<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        pci: &chalk_ir::ProgramClauseImplication<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt)))
    }

    fn debug_application_ty(
        _application_ty: &chalk_ir::ApplicationTy<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        application_ty: &chalk_ir::ApplicationTy<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt)))
    }

    fn debug_substitution(
        _substitution: &chalk_ir::Substitution<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        substitution: &chalk_ir::Substitution<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt)))
    }

    fn debug_separator_trait_ref(
        _separator_trait_ref: &chalk_ir::SeparatorTraitRef<Self>,
        _fmt: &mut fmt::Formatter<'_>,
        separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Option<fmt::Result> {
        None
        tls::with_current_program(|prog| {
            Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt))
        })
    }

    fn intern_ty(&self, ty: chalk_ir::TyData<Self>) -> Box<chalk_ir::TyData<Self>> {

@ -650,19 +647,22 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
        debug!("impls_for_trait {:?}", trait_id);
        let trait_: hir_def::TraitId = from_chalk(self.db, trait_id);

        let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone());

        let self_ty_fp = TyFingerprint::for_impl(&ty);

        // Note: Since we're using impls_for_trait, only impls where the trait
        // can be resolved should ever reach Chalk. `impl_datum` relies on that
        // and will panic if the trait can't be resolved.
        let mut result: Vec<_> = self
            .db
            .impls_for_trait(self.krate, trait_)
            .impls_for_trait(self.krate, trait_, self_ty_fp)
            .iter()
            .copied()
            .map(Impl::ImplDef)
            .map(|impl_| impl_.to_chalk(self.db))
            .collect();

        let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone());
        let arg: Option<Ty> =
            parameters.get(1).map(|p| from_chalk(self.db, p.assert_ty_ref(&Interner).clone()));
231 crates/ra_hir_ty/src/traits/chalk/tls.rs Normal file

@ -0,0 +1,231 @@
//! Implementation of Chalk debug helper functions using TLS.
use std::fmt;

use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName};

use super::{from_chalk, Interner};
use crate::{db::HirDatabase, CallableDef, TypeCtor};
use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId};

pub use unsafe_tls::{set_current_program, with_current_program};

pub struct DebugContext<'a>(&'a (dyn HirDatabase + 'a));

impl DebugContext<'_> {
    pub fn debug_struct_id(
        &self,
        id: super::StructId,
        f: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        let type_ctor: TypeCtor = from_chalk(self.0, TypeName::Struct(id));
        match type_ctor {
            TypeCtor::Bool => write!(f, "bool")?,
            TypeCtor::Char => write!(f, "char")?,
            TypeCtor::Int(t) => write!(f, "{}", t)?,
            TypeCtor::Float(t) => write!(f, "{}", t)?,
            TypeCtor::Str => write!(f, "str")?,
            TypeCtor::Slice => write!(f, "slice")?,
            TypeCtor::Array => write!(f, "array")?,
            TypeCtor::RawPtr(m) => write!(f, "*{}", m.as_keyword_for_ptr())?,
            TypeCtor::Ref(m) => write!(f, "&{}", m.as_keyword_for_ref())?,
            TypeCtor::Never => write!(f, "!")?,
            TypeCtor::Tuple { .. } => {
                write!(f, "()")?;
            }
            TypeCtor::FnPtr { .. } => {
                write!(f, "fn")?;
            }
            TypeCtor::FnDef(def) => {
                let name = match def {
                    CallableDef::FunctionId(ff) => self.0.function_data(ff).name.clone(),
                    CallableDef::StructId(s) => self.0.struct_data(s).name.clone(),
                    CallableDef::EnumVariantId(e) => {
                        let enum_data = self.0.enum_data(e.parent);
                        enum_data.variants[e.local_id].name.clone()
                    }
                };
                match def {
                    CallableDef::FunctionId(_) => write!(f, "{{fn {}}}", name)?,
                    CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
                        write!(f, "{{ctor {}}}", name)?
                    }
                }
            }
            TypeCtor::Adt(def_id) => {
                let name = match def_id {
                    AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
                    AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
                    AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
                };
                write!(f, "{}", name)?;
            }
            TypeCtor::AssociatedType(type_alias) => {
                let trait_ = match type_alias.lookup(self.0.upcast()).container {
                    AssocContainerId::TraitId(it) => it,
                    _ => panic!("not an associated type"),
                };
                let trait_name = self.0.trait_data(trait_).name.clone();
                let name = self.0.type_alias_data(type_alias).name.clone();
                write!(f, "{}::{}", trait_name, name)?;
            }
            TypeCtor::Closure { def, expr } => {
                write!(f, "{{closure {:?} in {:?}}}", expr.into_raw(), def)?;
            }
        }
        Ok(())
    }

    pub fn debug_trait_id(
        &self,
        id: super::TraitId,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        let trait_: hir_def::TraitId = from_chalk(self.0, id);
        let trait_data = self.0.trait_data(trait_);
        write!(fmt, "{}", trait_data.name)
    }

    pub fn debug_assoc_type_id(
        &self,
        id: super::AssocTypeId,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        let type_alias: TypeAliasId = from_chalk(self.0, id);
        let type_alias_data = self.0.type_alias_data(type_alias);
        let trait_ = match type_alias.lookup(self.0.upcast()).container {
            AssocContainerId::TraitId(t) => t,
            _ => panic!("associated type not in trait"),
        };
        let trait_data = self.0.trait_data(trait_);
        write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
    }

    pub fn debug_alias(
        &self,
        alias: &AliasTy<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        let type_alias: TypeAliasId = from_chalk(self.0, alias.associated_ty_id);
        let type_alias_data = self.0.type_alias_data(type_alias);
        let trait_ = match type_alias.lookup(self.0.upcast()).container {
            AssocContainerId::TraitId(t) => t,
            _ => panic!("associated type not in trait"),
        };
        let trait_data = self.0.trait_data(trait_);
        let params = alias.substitution.parameters(&Interner);
        write!(
            fmt,
            "<{:?} as {}<{:?}>>::{}",
            &params[0],
            trait_data.name,
            &params[1..],
            type_alias_data.name
        )
    }

    pub fn debug_ty(
        &self,
        ty: &chalk_ir::Ty<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", ty.data(&Interner))
    }

    pub fn debug_lifetime(
        &self,
        lifetime: &Lifetime<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", lifetime.data(&Interner))
    }

    pub fn debug_parameter(
        &self,
        parameter: &Parameter<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
    }

    pub fn debug_goal(
        &self,
        goal: &Goal<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        let goal_data = goal.data(&Interner);
        write!(fmt, "{:?}", goal_data)
    }

    pub fn debug_goals(
        &self,
        goals: &Goals<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", goals.debug(&Interner))
    }

    pub fn debug_program_clause_implication(
        &self,
        pci: &ProgramClauseImplication<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", pci.debug(&Interner))
    }

    pub fn debug_application_ty(
        &self,
        application_ty: &chalk_ir::ApplicationTy<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", application_ty.debug(&Interner))
    }

    pub fn debug_substitution(
        &self,
        substitution: &chalk_ir::Substitution<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", substitution.debug(&Interner))
    }

    pub fn debug_separator_trait_ref(
        &self,
        separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
        fmt: &mut fmt::Formatter<'_>,
    ) -> Result<(), fmt::Error> {
        write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
    }
}

mod unsafe_tls {
    use super::DebugContext;
    use crate::db::HirDatabase;
    use scoped_tls::scoped_thread_local;

    scoped_thread_local!(static PROGRAM: DebugContext);

    pub fn with_current_program<R>(
        op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
    ) -> R {
        if PROGRAM.is_set() {
            PROGRAM.with(|prog| op(Some(prog)))
        } else {
            op(None)
        }
    }

    pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
    where
        OP: FnOnce() -> R,
    {
        let ctx = DebugContext(p);
        // we're transmuting the lifetime in the DebugContext to static. This is
        // fine because we only keep the reference for the lifetime of this
        // function, *and* the only way to access the context is through
        // `with_current_program`, which hides the lifetime through the `for`
        // type.
        let static_p: &DebugContext<'static> =
            unsafe { std::mem::transmute::<&DebugContext, &DebugContext<'static>>(&ctx) };
        PROGRAM.set(static_p, || op())
    }
}
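The scoped-TLS trick above, reduced to a std-only sketch: a value is made available to downstream code only for the duration of a closure, and readers get an `Option` so they can fall back when nothing is set. (The real code uses the `scoped-tls` crate and an unsafe lifetime transmute; this mimics only the shape, with `String` standing in for the database reference.)

use std::cell::RefCell;

thread_local! {
    static PROGRAM: RefCell<Option<String>> = RefCell::new(None);
}

fn set_current_program<R>(p: &str, op: impl FnOnce() -> R) -> R {
    // Install the value, run the closure, then clear it again.
    PROGRAM.with(|slot| *slot.borrow_mut() = Some(p.to_string()));
    let res = op();
    PROGRAM.with(|slot| *slot.borrow_mut() = None);
    res
}

fn with_current_program<R>(op: impl FnOnce(Option<&str>) -> R) -> R {
    PROGRAM.with(|slot| op(slot.borrow().as_deref()))
}

fn main() {
    // Outside the scope nothing is set; inside, debug helpers can see it.
    assert_eq!(with_current_program(|p| p.map(str::to_owned)), None);
    let seen = set_current_program("chalk-debug", || {
        with_current_program(|p| p.unwrap().to_string())
    });
    assert_eq!(seen, "chalk-debug");
}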
@ -72,7 +72,6 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
    }
    for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
        // FIXME: Handle visibility
        // TODO: add the same behavior with type ?
        acc.add_tuple_field(ctx, i, &ty);
    }
}
@ -1,6 +1,9 @@
//! FIXME: write short doc here

use ra_syntax::{ast, match_ast, AstNode};
use ra_syntax::{
    ast::{self, ModuleItemOwner},
    match_ast, AstNode,
};
use rustc_hash::FxHashMap;

use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions};

@ -16,11 +19,19 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)

    let mut params = FxHashMap::default();
    for node in ctx.token.parent().ancestors() {
        match_ast! {
        let items = match_ast! {
            match node {
                ast::SourceFile(it) => process(it, &mut params),
                ast::ItemList(it) => process(it, &mut params),
                _ => (),
                ast::SourceFile(it) => it.items(),
                ast::ItemList(it) => it.items(),
                _ => continue,
            }
        };
        for item in items {
            if let ast::ModuleItem::FnDef(func) = item {
                func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
                    let text = param.syntax().text().to_string();
                    params.entry(text).or_insert((0, param)).0 += 1;
                })
            }
        }
    }

@ -39,15 +50,6 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
        .lookup_by(lookup)
        .add_to(acc)
    });

    fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) {
        node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
            |param| {
                let text = param.syntax().text().to_string();
                params.entry(text).or_insert((0, param)).0 += 1;
            },
        )
    }
}

#[cfg(test)]
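The completion ranks candidate parameters by how often the same parameter text appears across surrounding functions. A std-only sketch of that counting step, using the same `entry(..).or_insert(..)` pattern as the diff (the real code also keeps the `ast::Param` node alongside the count):

use std::collections::HashMap;

fn count_params<'a>(param_texts: impl Iterator<Item = &'a str>) -> HashMap<String, u32> {
    let mut params: HashMap<String, u32> = HashMap::new();
    for param in param_texts {
        // Identical parameter text seen again: bump its frequency.
        *params.entry(param.to_string()).or_insert(0) += 1;
    }
    params
}

fn main() {
    let seen = count_params(["ctx: &Context", "ctx: &Context", "n: usize"].into_iter());
    assert_eq!(seen["ctx: &Context"], 2);
}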
@ -35,7 +35,7 @@ use hir::{self, Docs, HasSource};
use ra_assists::utils::get_missing_impl_items;
use ra_syntax::{
    ast::{self, edit, ImplDef},
    AstNode, SyntaxKind, SyntaxNode, TextRange,
    AstNode, SyntaxKind, SyntaxNode, TextRange, T,
};
use ra_text_edit::TextEdit;

@ -204,7 +204,7 @@ fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {
    let end = const_
        .syntax()
        .children_with_tokens()
        .find(|s| s.kind() == SyntaxKind::SEMI || s.kind() == SyntaxKind::EQ)
        .find(|s| s.kind() == T![;] || s.kind() == T![=])
        .map_or(const_end, |f| f.text_range().start());

    let len = end - start;
@ -3,7 +3,7 @@
use crate::completion::{CompletionContext, Completions};

pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) {
    if !(ctx.is_trivial_path && !ctx.is_pat_binding_or_const) {
    if !(ctx.is_trivial_path && !ctx.is_pat_binding_or_const && !ctx.record_lit_syntax.is_some()) {
        return;
    }
@ -1,13 +1,11 @@
//! FIXME: write short doc here

use hir::{db::HirDatabase, Semantics, SemanticsScope};
use hir::{Semantics, SemanticsScope};
use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{
    algo::{find_covering_element, find_node_at_offset},
    ast,
    ast::ArgListOwner,
    AstNode,
    ast, AstNode,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, TextRange, TextUnit,
};

@ -196,7 +194,10 @@ impl<'a> CompletionContext<'a> {
        if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) {
            if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                self.is_pat_binding_or_const = true;
                if bind_pat.has_at() || bind_pat.is_ref() || bind_pat.is_mutable() {
                if bind_pat.at_token().is_some()
                    || bind_pat.ref_token().is_some()
                    || bind_pat.mut_token().is_some()
                {
                    self.is_pat_binding_or_const = false;
                }
                if bind_pat.syntax().parent().and_then(ast::RecordFieldPatList::cast).is_some() {

@ -230,7 +231,7 @@ impl<'a> CompletionContext<'a> {
        self.name_ref_syntax =
            find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
        let name_range = name_ref.syntax().text_range();
        if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
        if ast::RecordField::for_field_name(&name_ref).is_some() {
            self.record_lit_syntax =
                self.sema.find_node_at_offset_with_macros(&original_file, offset);
        }
@ -69,7 +69,13 @@ impl FunctionSignature {
        for field in st.fields(db).into_iter() {
            let ty = field.signature_ty(db);
            let raw_param = format!("{}", ty.display(db));
            parameter_types.push(raw_param.split(':').nth(1).unwrap()[1..].to_string());

            if let Some(param_type) = raw_param.split(':').nth(1) {
                parameter_types.push(param_type[1..].to_string());
            } else {
                // Fallback for tuple-struct fields, which have no `name: type` form
                parameter_types.push(raw_param.clone());
            }
            params.push(raw_param);
        }

@ -107,8 +113,15 @@ impl FunctionSignature {
        for field in variant.fields(db).into_iter() {
            let ty = field.signature_ty(db);
            let raw_param = format!("{}", ty.display(db));
            parameter_types.push(raw_param.split(':').nth(1).unwrap()[1..].to_string());
            params.push(raw_param);
            if let Some(param_type) = raw_param.split(':').nth(1) {
                parameter_types.push(param_type[1..].to_string());
            } else {
                // Fallback for tuple variant fields, which have no `name: type` form
                parameter_types.push(raw_param);
            }
            let name = field.name(db);

            params.push(format!("{}: {}", name, ty.display(db)));
        }

        Some(

@ -164,7 +177,7 @@ impl From<&'_ ast::FnDef> for FunctionSignature {
            has_self_param = true;
            let raw_param = self_param.syntax().text().to_string();

            // TODO: better solution ?
            // FIXME: better solution ?
            res_types.push(
                raw_param.split(':').nth(1).unwrap_or_else(|| " Self")[1..].to_string(),
            );
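The fix guards the `split(':')` against fields that lack the `name: type` form. A tiny, runnable reproduction of that fallback:

fn param_type(raw_param: &str) -> String {
    match raw_param.split(':').nth(1) {
        Some(param_type) => param_type[1..].to_string(), // skip the space after ':'
        None => raw_param.to_string(), // tuple-struct field: just a type, no name part
    }
}

fn main() {
    assert_eq!(param_type("foo: i32"), "i32");
    assert_eq!(param_type("u32"), "u32"); // tuple-struct field
}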
@ -1,4 +1,4 @@
//! FIXME: write short doc here
//! This module defines multiple types of inlay hints and their visibility

use hir::{Adt, HirDisplay, Semantics, Type};
use ra_ide_db::RootDatabase;

@ -235,8 +235,10 @@ fn should_show_param_hint(
    param_name: &str,
    argument: &ast::Expr,
) -> bool {
    let argument_string = argument.syntax().to_string();
    if param_name.is_empty() || argument_string.ends_with(param_name) {
    if param_name.is_empty()
        || is_argument_similar_to_param(argument, param_name)
        || Some(param_name) == fn_signature.name.as_ref().map(String::as_str)
    {
        return false;
    }

@ -245,12 +247,32 @@ fn should_show_param_hint(
    } else {
        fn_signature.parameters.len()
    };
    // avoid displaying hints for common functions like map, filter, etc.
    if parameters_len == 1 && (param_name.len() == 1 || param_name == "predicate") {
        return false;
    }

    true
    // avoid displaying hints for common functions like map, filter, etc.
    // or other obvious words used in std
    parameters_len != 1 || !is_obvious_param(param_name)
}

fn is_argument_similar_to_param(argument: &ast::Expr, param_name: &str) -> bool {
    let argument_string = remove_ref(argument.clone()).syntax().to_string();
    argument_string.starts_with(&param_name) || argument_string.ends_with(&param_name)
}

fn remove_ref(expr: ast::Expr) -> ast::Expr {
    if let ast::Expr::RefExpr(ref_expr) = &expr {
        if let Some(inner) = ref_expr.expr() {
            return inner;
        }
    }
    expr
}

fn is_obvious_param(param_name: &str) -> bool {
    let is_obvious_param_name = match param_name {
        "predicate" | "value" | "pat" | "rhs" | "other" => true,
        _ => false,
    };
    param_name.len() == 1 || is_obvious_param_name
}

fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> {

@ -1059,9 +1081,22 @@ impl Test {
        self
    }

    fn field(self, value: i32) -> Self {
        self
    }

    fn no_hints_expected(&self, _: i32, test_var: i32) {}

    fn frob(&self, frob: bool) {}
}

struct Param {}

fn different_order(param: &Param) {}
fn different_order_mut(param: &mut Param) {}

fn twiddle(twiddle: bool) {}

fn main() {
    let container: TestVarContainer = TestVarContainer { test_var: 42 };
    let test: Test = Test {};

@ -1069,11 +1104,23 @@ fn main() {
    map(22);
    filter(33);

    let test_processed: Test = test.map(1).filter(2);
    let test_processed: Test = test.map(1).filter(2).field(3);

    let test_var: i32 = 55;
    test_processed.no_hints_expected(22, test_var);
    test_processed.no_hints_expected(33, container.test_var);
    test_processed.frob(false);

    twiddle(true);

    let param_begin: Param = Param {};
    different_order(&param_begin);
    different_order_mut(&mut param_begin);

    let a: f64 = 7.0;
    let b: f64 = 4.0;
    let _: f64 = a.div_euclid(b);
    let _: f64 = a.abs_sub(b);
}"#,
);
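A runnable condensation of the heuristics above: hints are suppressed for empty or "obvious" names on single-parameter functions, and when the argument text already starts or ends with the parameter name. (The real code works on syntax nodes and a `FunctionSignature`; plain strings and a parameter count stand in for them here.)

fn is_obvious_param(param_name: &str) -> bool {
    let obvious = matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
    param_name.len() == 1 || obvious
}

fn should_show_param_hint(n_params: usize, param_name: &str, argument: &str) -> bool {
    if param_name.is_empty()
        || argument.starts_with(param_name)
        || argument.ends_with(param_name)
    {
        return false;
    }
    // Single-parameter functions with obvious names (map, filter, ...) get no hint.
    n_params != 1 || !is_obvious_param(param_name)
}

fn main() {
    assert!(!should_show_param_hint(1, "predicate", "x > 0")); // map/filter style
    assert!(!should_show_param_hint(2, "test_var", "container.test_var"));
    assert!(should_show_param_hint(2, "frob", "true"));
}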
@ -152,7 +152,7 @@ fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Optio
    if stmt.initializer().is_some() {
        let pat = stmt.pat()?;
        if let ast::Pat::BindPat(it) = pat {
            if it.is_mutable() {
            if it.mut_token().is_some() {
                return Some(ReferenceAccess::Write);
            }
        }
@ -165,7 +165,7 @@ SOURCE_FILE@[0; 60)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23) "!"
BANG@[22; 23) "!"
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24) "("
STRING@[24; 52) "\"\n    fn foo() {\n ..."

@ -173,7 +173,7 @@ SOURCE_FILE@[0; 60)
WHITESPACE@[53; 54) " "
STRING@[54; 56) "\"\""
R_PAREN@[56; 57) ")"
SEMI@[57; 58) ";"
SEMICOLON@[57; 58) ";"
WHITESPACE@[58; 59) "\n"
R_CURLY@[59; 60) "}"
"#

@ -226,7 +226,7 @@ EXPR_STMT@[16; 58)
PATH_SEGMENT@[16; 22)
NAME_REF@[16; 22)
IDENT@[16; 22) "assert"
EXCL@[22; 23) "!"
BANG@[22; 23) "!"
TOKEN_TREE@[23; 57)
L_PAREN@[23; 24) "("
STRING@[24; 52) "\"\n    fn foo() {\n ..."

@ -234,7 +234,7 @@ EXPR_STMT@[16; 58)
WHITESPACE@[53; 54) " "
STRING@[54; 56) "\"\""
R_PAREN@[56; 57) ")"
SEMI@[57; 58) ";"
SEMICOLON@[57; 58) ";"
"#
.trim()
);
@ -63,7 +63,7 @@ fn on_char_typed_inner(
fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> {
    assert_eq!(file.syntax().text().char_at(offset), Some('='));
    let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
    if let_stmt.has_semi() {
    if let_stmt.semicolon_token().is_some() {
        return None;
    }
    if let Some(expr) = let_stmt.initializer() {
@ -216,7 +216,7 @@ pub fn classify_name_ref(
        }
    }

    if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
    if let Some(record_field) = ast::RecordField::for_field_name(name_ref) {
        tested_by!(goto_def_for_record_fields; force);
        tested_by!(goto_def_for_field_init_shorthand; force);
        if let Some((field, local)) = sema.resolve_record_field(&record_field) {
@ -137,21 +137,23 @@ impl TokenMap {
        token_id: tt::TokenId,
        open_relative_range: TextRange,
        close_relative_range: TextRange,
    ) {
    ) -> usize {
        let res = self.entries.len();
        self.entries
            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
        res
    }

    fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
        if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
            if let TokenTextRange::Delimiter(dim, _) = entry.1 {
                entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
            }
    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
        let (_, token_text_range) = &mut self.entries[idx];
        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
        }
    }

    fn remove_delim(&mut self, token_id: tt::TokenId) {
        self.entries.retain(|(tid, _)| *tid != token_id);
    fn remove_delim(&mut self, idx: usize) {
        // FIXME: This could be accidentally quadratic
        self.entries.remove(idx);
    }
}

@ -238,24 +240,24 @@ impl TokenIdAlloc {
        token_id
    }

    fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert_delim(
        let idx = self.map.insert_delim(
            token_id,
            open_abs_range - self.global_offset,
            open_abs_range - self.global_offset,
        );
        token_id
        (token_id, idx)
    }

    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
        match close_abs_range {
            None => {
                self.map.remove_delim(id);
                self.map.remove_delim(idx);
            }
            Some(close) => {
                self.map.update_close_delim(id, close - self.global_offset);
                self.map.update_close_delim(idx, close - self.global_offset);
            }
        }
    }

@ -322,7 +324,7 @@ trait TokenConvertor {

        if let Some((kind, closed)) = delim {
            let mut subtree = tt::Subtree::default();
            let id = self.id_alloc().open_delim(range);
            let (id, idx) = self.id_alloc().open_delim(range);
            subtree.delimiter = Some(tt::Delimiter { kind, id });

            while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {

@ -331,7 +333,7 @@ trait TokenConvertor {
                let last_range = match self.bump() {
                    None => {
                        // For error resilience, we insert a char punct for the opening delim here
                        self.id_alloc().close_delim(id, None);
                        self.id_alloc().close_delim(idx, None);
                        let leaf: tt::Leaf = tt::Punct {
                            id: self.id_alloc().alloc(range),
                            char: token.to_char().unwrap(),

@ -344,7 +346,7 @@ trait TokenConvertor {
                    }
                    Some(it) => it.1,
                };
                self.id_alloc().close_delim(idx, Some(last_range));
                subtree.into()
            } else {
                let spacing = match self.peek() {
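The change replaces an id-based linear search with an index handle: `insert_delim` returns the entry's position, and later close/remove operations address it directly. A std-only sketch of that bookkeeping (the types are simplified stand-ins for `TokenMap` and `TokenTextRange`):

#[derive(Debug, PartialEq)]
enum Range {
    Delimiter(u32, u32), // open offset, close offset
}

#[derive(Default)]
struct TokenMap {
    entries: Vec<(u32, Range)>, // (token_id, range)
}

impl TokenMap {
    fn insert_delim(&mut self, token_id: u32, open: u32, close: u32) -> usize {
        let idx = self.entries.len();
        self.entries.push((token_id, Range::Delimiter(open, close)));
        idx // O(1) handle for the later close/remove
    }

    fn update_close_delim(&mut self, idx: usize, close: u32) {
        let (_, range) = &mut self.entries[idx];
        let Range::Delimiter(_, c) = range;
        *c = close;
    }

    fn remove_delim(&mut self, idx: usize) {
        // Direct removal by index; the old code filtered the whole list by id.
        self.entries.remove(idx);
    }
}

fn main() {
    let mut map = TokenMap::default();
    let idx = map.insert_delim(42, 0, 0);
    map.update_close_delim(idx, 10);
    assert_eq!(map.entries[idx].1, Range::Delimiter(0, 10));
    map.remove_delim(idx);
    assert!(map.entries.is_empty());
}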
@ -141,6 +141,79 @@ macro_rules! impl_froms {
    );
}

#[test]
fn test_convert_tt2() {
    parse_macro(
        r#"
macro_rules! impl_froms {
    ($e:ident: $($v:ident),*) => {
        $(
            impl From<$v> for $e {
                fn from(it: $v) -> $e {
                    $e::$v(it)
                }
            }
        )*
    }
}
"#,
    )
    .assert_expand(
        "impl_froms!(TokenTree: Leaf, Subtree);",
        r#"
SUBTREE $
  IDENT   impl 20
  IDENT   From 21
  PUNCH   < [joint] 22
  IDENT   Leaf 53
  PUNCH   > [alone] 25
  IDENT   for 26
  IDENT   TokenTree 51
  SUBTREE {} 29
    IDENT   fn 30
    IDENT   from 31
    SUBTREE () 32
      IDENT   it 33
      PUNCH   : [alone] 34
      IDENT   Leaf 53
    PUNCH   - [joint] 37
    PUNCH   > [alone] 38
    IDENT   TokenTree 51
    SUBTREE {} 41
      IDENT   TokenTree 51
      PUNCH   : [joint] 44
      PUNCH   : [joint] 45
      IDENT   Leaf 53
      SUBTREE () 48
        IDENT   it 49
  IDENT   impl 20
  IDENT   From 21
  PUNCH   < [joint] 22
  IDENT   Subtree 55
  PUNCH   > [alone] 25
  IDENT   for 26
  IDENT   TokenTree 51
  SUBTREE {} 29
    IDENT   fn 30
    IDENT   from 31
    SUBTREE () 32
      IDENT   it 33
      PUNCH   : [alone] 34
      IDENT   Subtree 55
    PUNCH   - [joint] 37
    PUNCH   > [alone] 38
    IDENT   TokenTree 51
    SUBTREE {} 41
      IDENT   TokenTree 51
      PUNCH   : [joint] 44
      PUNCH   : [joint] 45
      IDENT   Subtree 55
      SUBTREE () 48
        IDENT   it 49
"#,
    );
}

#[test]
fn test_expr_order() {
    let expanded = parse_macro(

@ -179,7 +252,7 @@ fn test_expr_order() {
    STAR@[11; 12) "*"
    LITERAL@[12; 13)
    INT_NUMBER@[12; 13) "2"
    SEMI@[13; 14) ";"
    SEMICOLON@[13; 14) ";"
    R_CURLY@[14; 15) "}""#,
    );
}

@ -532,7 +605,7 @@ fn test_tt_to_stmts() {
    EQ@[4; 5) "="
    LITERAL@[5; 6)
    INT_NUMBER@[5; 6) "0"
    SEMI@[6; 7) ";"
    SEMICOLON@[6; 7) ";"
    EXPR_STMT@[7; 14)
    BIN_EXPR@[7; 13)
    PATH_EXPR@[7; 8)

@ -547,7 +620,7 @@ fn test_tt_to_stmts() {
    PLUS@[11; 12) "+"
    LITERAL@[12; 13)
    INT_NUMBER@[12; 13) "1"
    SEMI@[13; 14) ";"
    SEMICOLON@[13; 14) ";"
    EXPR_STMT@[14; 15)
    PATH_EXPR@[14; 15)
    PATH@[14; 15)

@ -880,7 +953,7 @@ fn test_tt_composite2() {
    PATH_SEGMENT@[0; 3)
    NAME_REF@[0; 3)
    IDENT@[0; 3) "abs"
    EXCL@[3; 4) "!"
    BANG@[3; 4) "!"
    TOKEN_TREE@[4; 10)
    L_PAREN@[4; 5) "("
    EQ@[5; 6) "="

@ -1000,14 +1073,14 @@ fn test_vec() {
    PATH_SEGMENT@[9; 12)
    NAME_REF@[9; 12)
    IDENT@[9; 12) "Vec"
    COLONCOLON@[12; 14) "::"
    COLON2@[12; 14) "::"
    PATH_SEGMENT@[14; 17)
    NAME_REF@[14; 17)
    IDENT@[14; 17) "new"
    ARG_LIST@[17; 19)
    L_PAREN@[17; 18) "("
    R_PAREN@[18; 19) ")"
    SEMI@[19; 20) ";"
    SEMICOLON@[19; 20) ";"
    EXPR_STMT@[20; 33)
    METHOD_CALL_EXPR@[20; 32)
    PATH_EXPR@[20; 21)

@ -1023,7 +1096,7 @@ fn test_vec() {
    LITERAL@[27; 31)
    INT_NUMBER@[27; 31) "1u32"
    R_PAREN@[31; 32) ")"
    SEMI@[32; 33) ";"
    SEMICOLON@[32; 33) ";"
    EXPR_STMT@[33; 43)
    METHOD_CALL_EXPR@[33; 42)
    PATH_EXPR@[33; 34)

@ -1039,7 +1112,7 @@ fn test_vec() {
    LITERAL@[40; 41)
    INT_NUMBER@[40; 41) "2"
    R_PAREN@[41; 42) ")"
    SEMI@[42; 43) ";"
    SEMICOLON@[42; 43) ";"
    PATH_EXPR@[43; 44)
    PATH@[43; 44)
    PATH_SEGMENT@[43; 44)

@ -1479,6 +1552,12 @@ impl MacroFixture {
        assert_eq!(expansion.to_string(), expected);
    }

    fn assert_expand(&self, invocation: &str, expected: &str) {
        let expansion = self.expand_tt(invocation);
        let actual = format!("{:?}", expansion);
        test_utils::assert_eq_text!(&actual.trim(), &expected.trim());
    }

    fn assert_expand_items(&self, invocation: &str, expected: &str) -> &MacroFixture {
        self.assert_expansion(FragmentKind::Items, invocation, expected);
        self

@ -1681,7 +1760,7 @@ fn test_no_space_after_semi_colon() {
    MOD_KW@[21; 24) "mod"
    NAME@[24; 25)
    IDENT@[24; 25) "m"
    SEMI@[25; 26) ";"
    SEMICOLON@[25; 26) ";"
    MODULE@[26; 52)
    ATTR@[26; 47)
    POUND@[26; 27) "#"

@ -1700,7 +1779,7 @@ fn test_no_space_after_semi_colon() {
    MOD_KW@[47; 50) "mod"
    NAME@[50; 51)
    IDENT@[50; 51) "f"
    SEMI@[51; 52) ";""###,
    SEMICOLON@[51; 52) ";""###,
    );
}
@ -339,7 +339,8 @@ fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMark
    (Some(lhs), BlockLike::NotBlock)
}

const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOT, MINUS]);
const LHS_FIRST: TokenSet =
    atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]);

fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
    let m;

@ -618,26 +619,39 @@ pub(crate) fn record_field_list(p: &mut Parser) {
    let m = p.start();
    p.bump(T!['{']);
    while !p.at(EOF) && !p.at(T!['}']) {
        match p.current() {
        let m = p.start();
        // test record_literal_field_with_attr
        // fn main() {
        //     S { #[cfg(test)] field: 1 }
        // }
            IDENT | INT_NUMBER | T![#] => {
                let m = p.start();
        attributes::outer_attributes(p);

        match p.current() {
            IDENT | INT_NUMBER => {
                // test_err record_literal_before_ellipsis_recovery
                // fn main() {
                //     S { field ..S::default() }
                // }
                if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) {
                    name_ref_or_index(p);
                    if p.eat(T![:]) {
                        expr(p);
                    p.expect(T![:]);
                }
                expr(p);
                m.complete(p, RECORD_FIELD);
            }
            T![.] if p.at(T![..]) => {
                m.abandon(p);
                p.bump(T![..]);
                expr(p);
            }
            T!['{'] => error_block(p, "expected a field"),
            _ => p.err_and_bump("expected identifier"),
            T!['{'] => {
                error_block(p, "expected a field");
                m.abandon(p);
            }
            _ => {
                p.err_and_bump("expected identifier");
                m.abandon(p);
            }
        }
        if !p.at(T!['}']) {
            p.expect(T![,]);
@ -3,8 +3,19 @@
use super::*;

pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
    L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW,
    DYN_KW, L_ANGLE,
    T!['('],
    T!['['],
    T![<],
    T![!],
    T![*],
    T![&],
    T![_],
    T![fn],
    T![unsafe],
    T![extern],
    T![for],
    T![impl],
    T![dyn],
]);

const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA, L_DOLLAR];
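`TokenSet` here is a const bitset over `SyntaxKind`, which is what makes first/recovery sets like `TYPE_FIRST` composable at compile time. A minimal std-only version of the same idea, with a `union` usable in const context (all names below are illustrative, not the real parser types):

#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const fn new(kinds: &[u8]) -> TokenSet {
        let mut bits = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            bits |= 1u128 << kinds[i]; // one bit per token kind
            i += 1;
        }
        TokenSet(bits)
    }
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    const fn contains(self, kind: u8) -> bool {
        self.0 & (1u128 << kind) != 0
    }
}

// Toy kinds standing in for T!['('], T![!], IDENT, ...
const L_PAREN: u8 = 1;
const BANG: u8 = 2;
const IDENT: u8 = 3;

const PATH_FIRST: TokenSet = TokenSet::new(&[IDENT]);
const TYPE_FIRST: TokenSet = PATH_FIRST.union(TokenSet::new(&[L_PAREN, BANG]));

fn main() {
    assert!(TYPE_FIRST.contains(BANG));
    assert!(!TokenSet::new(&[L_PAREN]).contains(IDENT));
}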
@ -9,7 +9,7 @@ pub enum SyntaxKind {
|
|||
TOMBSTONE,
|
||||
#[doc(hidden)]
|
||||
EOF,
|
||||
SEMI,
|
||||
SEMICOLON,
|
||||
COMMA,
|
||||
L_PAREN,
|
||||
R_PAREN,
|
||||
|
@ -33,15 +33,15 @@ pub enum SyntaxKind {
|
|||
PERCENT,
|
||||
UNDERSCORE,
|
||||
DOT,
|
||||
DOTDOT,
|
||||
DOTDOTDOT,
|
||||
DOTDOTEQ,
|
||||
DOT2,
|
||||
DOT3,
|
||||
DOT2EQ,
|
||||
COLON,
|
||||
COLONCOLON,
|
||||
COLON2,
|
||||
EQ,
|
||||
EQEQ,
|
||||
EQ2,
|
||||
FAT_ARROW,
|
||||
EXCL,
|
||||
BANG,
|
||||
NEQ,
|
||||
MINUS,
|
||||
THIN_ARROW,
|
||||
|
@ -55,8 +55,8 @@ pub enum SyntaxKind {
|
|||
SLASHEQ,
|
||||
STAREQ,
|
||||
PERCENTEQ,
|
||||
AMPAMP,
|
||||
PIPEPIPE,
|
||||
AMP2,
|
||||
PIPE2,
|
||||
SHL,
|
||||
SHR,
|
||||
SHLEQ,
|
||||
|
@ -265,12 +265,12 @@ impl SyntaxKind {
|
|||
}
|
||||
pub fn is_punct(self) -> bool {
|
||||
match self {
|
||||
SEMI | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK | L_ANGLE
|
||||
| R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS | STAR
|
||||
| SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOTDOT | DOTDOTDOT | DOTDOTEQ
|
||||
| COLON | COLONCOLON | EQ | EQEQ | FAT_ARROW | EXCL | NEQ | MINUS | THIN_ARROW
|
||||
| LTEQ | GTEQ | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ
|
||||
| PERCENTEQ | AMPAMP | PIPEPIPE | SHL | SHR | SHLEQ | SHREQ => true,
|
||||
SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK
|
||||
| L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS
|
||||
| STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON
|
||||
| COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ
|
||||
| PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2
|
||||
| PIPE2 | SHL | SHR | SHLEQ | SHREQ => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -329,7 +329,7 @@ impl SyntaxKind {
|
|||
}
|
||||
pub fn from_char(c: char) -> Option<SyntaxKind> {
|
||||
let tok = match c {
|
||||
';' => SEMI,
|
||||
';' => SEMICOLON,
|
||||
',' => COMMA,
|
||||
'(' => L_PAREN,
|
||||
')' => R_PAREN,
|
||||
|
@ -355,7 +355,7 @@ impl SyntaxKind {
|
|||
'.' => DOT,
|
||||
':' => COLON,
|
||||
'=' => EQ,
|
||||
'!' => EXCL,
|
||||
'!' => BANG,
|
||||
'-' => MINUS,
|
||||
_ => return None,
|
||||
};
|
||||
|
@@ -363,296 +363,4 @@ impl SyntaxKind {
     }
 }
-#[macro_export]
-macro_rules! T {
-    ( ; ) => { $crate::SyntaxKind::SEMI };
-    ( , ) => { $crate::SyntaxKind::COMMA };
-    ( '(' ) => { $crate::SyntaxKind::L_PAREN };
-    ( ')' ) => { $crate::SyntaxKind::R_PAREN };
-    ( '{' ) => { $crate::SyntaxKind::L_CURLY };
-    ( '}' ) => { $crate::SyntaxKind::R_CURLY };
-    ( '[' ) => { $crate::SyntaxKind::L_BRACK };
-    ( ']' ) => { $crate::SyntaxKind::R_BRACK };
-    ( < ) => { $crate::SyntaxKind::L_ANGLE };
-    ( > ) => { $crate::SyntaxKind::R_ANGLE };
-    ( @ ) => { $crate::SyntaxKind::AT };
-    ( # ) => { $crate::SyntaxKind::POUND };
-    ( ~ ) => { $crate::SyntaxKind::TILDE };
-    ( ? ) => { $crate::SyntaxKind::QUESTION };
-    ( $ ) => { $crate::SyntaxKind::DOLLAR };
-    ( & ) => { $crate::SyntaxKind::AMP };
-    ( | ) => { $crate::SyntaxKind::PIPE };
-    ( + ) => { $crate::SyntaxKind::PLUS };
-    ( * ) => { $crate::SyntaxKind::STAR };
-    ( / ) => { $crate::SyntaxKind::SLASH };
-    ( ^ ) => { $crate::SyntaxKind::CARET };
-    ( % ) => { $crate::SyntaxKind::PERCENT };
-    ( _ ) => { $crate::SyntaxKind::UNDERSCORE };
-    ( . ) => { $crate::SyntaxKind::DOT };
-    ( .. ) => { $crate::SyntaxKind::DOTDOT };
-    ( ... ) => { $crate::SyntaxKind::DOTDOTDOT };
-    ( ..= ) => { $crate::SyntaxKind::DOTDOTEQ };
-    ( : ) => { $crate::SyntaxKind::COLON };
-    ( :: ) => { $crate::SyntaxKind::COLONCOLON };
-    ( = ) => { $crate::SyntaxKind::EQ };
-    ( == ) => { $crate::SyntaxKind::EQEQ };
-    ( => ) => { $crate::SyntaxKind::FAT_ARROW };
-    ( ! ) => { $crate::SyntaxKind::EXCL };
-    ( != ) => { $crate::SyntaxKind::NEQ };
-    ( - ) => { $crate::SyntaxKind::MINUS };
-    ( -> ) => { $crate::SyntaxKind::THIN_ARROW };
-    ( <= ) => { $crate::SyntaxKind::LTEQ };
-    ( >= ) => { $crate::SyntaxKind::GTEQ };
-    ( += ) => { $crate::SyntaxKind::PLUSEQ };
-    ( -= ) => { $crate::SyntaxKind::MINUSEQ };
-    ( |= ) => { $crate::SyntaxKind::PIPEEQ };
-    ( &= ) => { $crate::SyntaxKind::AMPEQ };
-    ( ^= ) => { $crate::SyntaxKind::CARETEQ };
-    ( /= ) => { $crate::SyntaxKind::SLASHEQ };
-    ( *= ) => { $crate::SyntaxKind::STAREQ };
-    ( %= ) => { $crate::SyntaxKind::PERCENTEQ };
-    ( && ) => { $crate::SyntaxKind::AMPAMP };
-    ( || ) => { $crate::SyntaxKind::PIPEPIPE };
-    ( << ) => { $crate::SyntaxKind::SHL };
-    ( >> ) => { $crate::SyntaxKind::SHR };
-    ( <<= ) => { $crate::SyntaxKind::SHLEQ };
-    ( >>= ) => { $crate::SyntaxKind::SHREQ };
-    ( as ) => { $crate::SyntaxKind::AS_KW };
-    ( async ) => { $crate::SyntaxKind::ASYNC_KW };
-    ( await ) => { $crate::SyntaxKind::AWAIT_KW };
-    ( box ) => { $crate::SyntaxKind::BOX_KW };
-    ( break ) => { $crate::SyntaxKind::BREAK_KW };
-    ( const ) => { $crate::SyntaxKind::CONST_KW };
-    ( continue ) => { $crate::SyntaxKind::CONTINUE_KW };
-    ( crate ) => { $crate::SyntaxKind::CRATE_KW };
-    ( dyn ) => { $crate::SyntaxKind::DYN_KW };
-    ( else ) => { $crate::SyntaxKind::ELSE_KW };
-    ( enum ) => { $crate::SyntaxKind::ENUM_KW };
-    ( extern ) => { $crate::SyntaxKind::EXTERN_KW };
-    ( false ) => { $crate::SyntaxKind::FALSE_KW };
-    ( fn ) => { $crate::SyntaxKind::FN_KW };
-    ( for ) => { $crate::SyntaxKind::FOR_KW };
-    ( if ) => { $crate::SyntaxKind::IF_KW };
-    ( impl ) => { $crate::SyntaxKind::IMPL_KW };
-    ( in ) => { $crate::SyntaxKind::IN_KW };
-    ( let ) => { $crate::SyntaxKind::LET_KW };
-    ( loop ) => { $crate::SyntaxKind::LOOP_KW };
-    ( macro ) => { $crate::SyntaxKind::MACRO_KW };
-    ( match ) => { $crate::SyntaxKind::MATCH_KW };
-    ( mod ) => { $crate::SyntaxKind::MOD_KW };
-    ( move ) => { $crate::SyntaxKind::MOVE_KW };
-    ( mut ) => { $crate::SyntaxKind::MUT_KW };
-    ( pub ) => { $crate::SyntaxKind::PUB_KW };
-    ( ref ) => { $crate::SyntaxKind::REF_KW };
-    ( return ) => { $crate::SyntaxKind::RETURN_KW };
-    ( self ) => { $crate::SyntaxKind::SELF_KW };
-    ( static ) => { $crate::SyntaxKind::STATIC_KW };
-    ( struct ) => { $crate::SyntaxKind::STRUCT_KW };
-    ( super ) => { $crate::SyntaxKind::SUPER_KW };
-    ( trait ) => { $crate::SyntaxKind::TRAIT_KW };
-    ( true ) => { $crate::SyntaxKind::TRUE_KW };
-    ( try ) => { $crate::SyntaxKind::TRY_KW };
-    ( type ) => { $crate::SyntaxKind::TYPE_KW };
-    ( unsafe ) => { $crate::SyntaxKind::UNSAFE_KW };
-    ( use ) => { $crate::SyntaxKind::USE_KW };
-    ( where ) => { $crate::SyntaxKind::WHERE_KW };
-    ( while ) => { $crate::SyntaxKind::WHILE_KW };
-    ( auto ) => { $crate::SyntaxKind::AUTO_KW };
-    ( default ) => { $crate::SyntaxKind::DEFAULT_KW };
-    ( existential ) => { $crate::SyntaxKind::EXISTENTIAL_KW };
-    ( union ) => { $crate::SyntaxKind::UNION_KW };
-    ( raw ) => { $crate::SyntaxKind::RAW_KW };
-}
+macro_rules ! T { [ ; ] => { $ crate :: SyntaxKind :: SEMICOLON } ; [ , ] => { $ crate :: SyntaxKind :: COMMA } ; [ '(' ] => { $ crate :: SyntaxKind :: L_PAREN } ; [ ')' ] => { $ crate :: SyntaxKind :: R_PAREN } ; [ '{' ] => { $ crate :: SyntaxKind :: L_CURLY } ; [ '}' ] => { $ crate :: SyntaxKind :: R_CURLY } ; [ '[' ] => { $ crate :: SyntaxKind :: L_BRACK } ; [ ']' ] => { $ crate :: SyntaxKind :: R_BRACK } ; [ < ] => { $ crate :: SyntaxKind :: L_ANGLE } ; [ > ] => { $ crate :: SyntaxKind :: R_ANGLE } ; [ @ ] => { $ crate :: SyntaxKind :: AT } ; [ # ] => { $ crate :: SyntaxKind :: POUND } ; [ ~ ] => { $ crate :: SyntaxKind :: TILDE } ; [ ? ] => { $ crate :: SyntaxKind :: QUESTION } ; [ $ ] => { $ crate :: SyntaxKind :: DOLLAR } ; [ & ] => { $ crate :: SyntaxKind :: AMP } ; [ | ] => { $ crate :: SyntaxKind :: PIPE } ; [ + ] => { $ crate :: SyntaxKind :: PLUS } ; [ * ] => { $ crate :: SyntaxKind :: STAR } ; [ / ] => { $ crate :: SyntaxKind :: SLASH } ; [ ^ ] => { $ crate :: SyntaxKind :: CARET } ; [ % ] => { $ crate :: SyntaxKind :: PERCENT } ; [ _ ] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [ . ] => { $ crate :: SyntaxKind :: DOT } ; [ .. ] => { $ crate :: SyntaxKind :: DOT2 } ; [ ... ] => { $ crate :: SyntaxKind :: DOT3 } ; [ ..= ] => { $ crate :: SyntaxKind :: DOT2EQ } ; [ : ] => { $ crate :: SyntaxKind :: COLON } ; [ :: ] => { $ crate :: SyntaxKind :: COLON2 } ; [ = ] => { $ crate :: SyntaxKind :: EQ } ; [ == ] => { $ crate :: SyntaxKind :: EQ2 } ; [ => ] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [ ! ] => { $ crate :: SyntaxKind :: BANG } ; [ != ] => { $ crate :: SyntaxKind :: NEQ } ; [ - ] => { $ crate :: SyntaxKind :: MINUS } ; [ -> ] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [ <= ] => { $ crate :: SyntaxKind :: LTEQ } ; [ >= ] => { $ crate :: SyntaxKind :: GTEQ } ; [ += ] => { $ crate :: SyntaxKind :: PLUSEQ } ; [ -= ] => { $ crate :: SyntaxKind :: MINUSEQ } ; [ |= ] => { $ crate :: SyntaxKind :: PIPEEQ } ; [ &= ] => { $ crate :: SyntaxKind :: AMPEQ } ; [ ^= ] => { $ crate :: SyntaxKind :: CARETEQ } ; [ /= ] => { $ crate :: SyntaxKind :: SLASHEQ } ; [ *= ] => { $ crate :: SyntaxKind :: STAREQ } ; [ %= ] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [ && ] => { $ crate :: SyntaxKind :: AMP2 } ; [ || ] => { $ crate :: SyntaxKind :: PIPE2 } ; [ << ] => { $ crate :: SyntaxKind :: SHL } ; [ >> ] => { $ crate :: SyntaxKind :: SHR } ; [ <<= ] => { $ crate :: SyntaxKind :: SHLEQ } ; [ >>= ] => { $ crate :: SyntaxKind :: SHREQ } ; [ as ] => { $ crate :: SyntaxKind :: AS_KW } ; [ async ] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [ await ] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [ box ] => { $ crate :: SyntaxKind :: BOX_KW } ; [ break ] => { $ crate :: SyntaxKind :: BREAK_KW } ; [ const ] => { $ crate :: SyntaxKind :: CONST_KW } ; [ continue ] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [ crate ] => { $ crate :: SyntaxKind :: CRATE_KW } ; [ dyn ] => { $ crate :: SyntaxKind :: DYN_KW } ; [ else ] => { $ crate :: SyntaxKind :: ELSE_KW } ; [ enum ] => { $ crate :: SyntaxKind :: ENUM_KW } ; [ extern ] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [ false ] => { $ crate :: SyntaxKind :: FALSE_KW } ; [ fn ] => { $ crate :: SyntaxKind :: FN_KW } ; [ for ] => { $ crate :: SyntaxKind :: FOR_KW } ; [ if ] => { $ crate :: SyntaxKind :: IF_KW } ; [ impl ] => { $ crate :: SyntaxKind :: IMPL_KW } ; [ in ] => { $ crate :: SyntaxKind :: IN_KW } ; [ let ] => { $ crate :: SyntaxKind :: LET_KW } ; [ loop ] => { $ crate :: SyntaxKind :: LOOP_KW } ; [ macro ] => { $ crate :: SyntaxKind :: MACRO_KW } ; [ match ] => { $ crate :: SyntaxKind :: MATCH_KW } ; [ mod ] => { $ crate :: SyntaxKind :: MOD_KW } ; [ move ] => { $ crate :: SyntaxKind :: MOVE_KW } ; [ mut ] => { $ crate :: SyntaxKind :: MUT_KW } ; [ pub ] => { $ crate :: SyntaxKind :: PUB_KW } ; [ ref ] => { $ crate :: SyntaxKind :: REF_KW } ; [ return ] => { $ crate :: SyntaxKind :: RETURN_KW } ; [ self ] => { $ crate :: SyntaxKind :: SELF_KW } ; [ static ] => { $ crate :: SyntaxKind :: STATIC_KW } ; [ struct ] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [ super ] => { $ crate :: SyntaxKind :: SUPER_KW } ; [ trait ] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [ true ] => { $ crate :: SyntaxKind :: TRUE_KW } ; [ try ] => { $ crate :: SyntaxKind :: TRY_KW } ; [ type ] => { $ crate :: SyntaxKind :: TYPE_KW } ; [ unsafe ] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [ use ] => { $ crate :: SyntaxKind :: USE_KW } ; [ where ] => { $ crate :: SyntaxKind :: WHERE_KW } ; [ while ] => { $ crate :: SyntaxKind :: WHILE_KW } ; [ auto ] => { $ crate :: SyntaxKind :: AUTO_KW } ; [ default ] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [ existential ] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [ union ] => { $ crate :: SyntaxKind :: UNION_KW } ; [ raw ] => { $ crate :: SyntaxKind :: RAW_KW } ; [ lifetime ] => { $ crate :: SyntaxKind :: LIFETIME } ; [ ident ] => { $ crate :: SyntaxKind :: IDENT } ; }

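Note: call sites are unaffected by the move to the generated, bracket-matched `T!`: `T![;]` now yields `SyntaxKind::SEMICOLON`, `T![..]` yields `SyntaxKind::DOT2`, and so on. A hedged sketch of typical use (a `SyntaxToken` named `tok` is assumed):

    match tok.kind() {
        T![;] => { /* SEMICOLON */ }
        T![..] => { /* DOT2 */ }
        T![ident] => { /* IDENT, one of the newly added arms */ }
        _ => {}
    }
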
@@ -12,6 +12,9 @@ doctest = false
 ra_tt = { path = "../ra_tt" }
 ra_mbe = { path = "../ra_mbe" }
 ra_proc_macro = { path = "../ra_proc_macro" }
+goblin = "0.2.1"
+libloading = "0.6.0"
 test_utils = { path = "../test_utils" }

 [dev-dependencies]
+cargo_metadata = "0.9.1"

211 crates/ra_proc_macro_srv/src/dylib.rs Normal file
@@ -0,0 +1,211 @@
//! Handles dynamic library loading for proc macro

use crate::{proc_macro::bridge, rustc_server::TokenStream};
use std::path::Path;

use goblin::{mach::Mach, Object};
use libloading::Library;
use ra_proc_macro::ProcMacroKind;

use std::io::Error as IoError;
use std::io::ErrorKind as IoErrorKind;

const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";

fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> IoError {
    IoError::new(IoErrorKind::InvalidData, e)
}

fn get_symbols_from_lib(file: &Path) -> Result<Vec<String>, IoError> {
    let buffer = std::fs::read(file)?;
    let object = Object::parse(&buffer).map_err(invalid_data_err)?;

    match object {
        Object::Elf(elf) => {
            let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?;
            let names = symbols.iter().map(|s| s.to_string()).collect();
            Ok(names)
        }
        Object::PE(pe) => {
            let symbol_names =
                pe.exports.iter().flat_map(|s| s.name).map(|n| n.to_string()).collect();
            Ok(symbol_names)
        }
        Object::Mach(mach) => match mach {
            Mach::Binary(binary) => {
                let exports = binary.exports().map_err(invalid_data_err)?;
                let names = exports
                    .into_iter()
                    .map(|s| {
                        // Per the macOS dlsym(3) man page:
                        // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
                        // Unlike other dyld APIs, the symbol name passed to dlsym() must NOT be
                        // prepended with an underscore.
                        if s.name.starts_with("_") {
                            s.name[1..].to_string()
                        } else {
                            s.name
                        }
                    })
                    .collect();
                Ok(names)
            }
            Mach::Fat(_) => Ok(vec![]),
        },
        Object::Archive(_) | Object::Unknown(_) => Ok(vec![]),
    }
}

fn is_derive_registrar_symbol(symbol: &str) -> bool {
    symbol.contains(NEW_REGISTRAR_SYMBOL)
}

fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> {
    let symbols = get_symbols_from_lib(file)?;
    Ok(symbols.into_iter().find(|s| is_derive_registrar_symbol(s)))
}

/// Loads dynamic library in platform dependent manner.
///
/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described
/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
/// and [here](https://github.com/rust-lang/rust/issues/60593).
///
/// Usage of RTLD_DEEPBIND
/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
///
/// It seems that on Windows that behaviour is the default, so we do nothing in that case.
#[cfg(windows)]
fn load_library(file: &Path) -> Result<Library, libloading::Error> {
    Library::new(file)
}

#[cfg(unix)]
fn load_library(file: &Path) -> Result<Library, libloading::Error> {
    use libloading::os::unix::Library as UnixLibrary;
    use std::os::raw::c_int;

    const RTLD_NOW: c_int = 0x00002;
    const RTLD_DEEPBIND: c_int = 0x00008;

    UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into())
}

struct ProcMacroLibraryLibloading {
    // Hold the dylib to prevent it from unloading
    _lib: Library,
    exported_macros: Vec<bridge::client::ProcMacro>,
}

impl ProcMacroLibraryLibloading {
    fn open(file: &Path) -> Result<Self, IoError> {
        let symbol_name = find_registrar_symbol(file)?
            .ok_or(invalid_data_err(format!("Cannot find registrar symbol in file {:?}", file)))?;

        let lib = load_library(file).map_err(invalid_data_err)?;
        let exported_macros = {
            let macros: libloading::Symbol<&&[bridge::client::ProcMacro]> =
                unsafe { lib.get(symbol_name.as_bytes()) }.map_err(invalid_data_err)?;
            macros.to_vec()
        };

        Ok(ProcMacroLibraryLibloading { _lib: lib, exported_macros })
    }
}

type ProcMacroLibraryImpl = ProcMacroLibraryLibloading;

pub struct Expander {
    libs: Vec<ProcMacroLibraryImpl>,
}

impl Expander {
    pub fn new<P: AsRef<Path>>(lib: &P) -> Result<Expander, String> {
        let mut libs = vec![];
        /* Some libraries for dynamic loading require a canonicalized path (even when it is
        already absolute). */
        let lib =
            lib.as_ref().canonicalize().expect(&format!("Cannot canonicalize {:?}", lib.as_ref()));

        let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?;
        libs.push(library);

        Ok(Expander { libs })
    }

    pub fn expand(
        &self,
        macro_name: &str,
        macro_body: &ra_tt::Subtree,
        attributes: Option<&ra_tt::Subtree>,
    ) -> Result<ra_tt::Subtree, bridge::PanicMessage> {
        let parsed_body = TokenStream::with_subtree(macro_body.clone());

        let parsed_attributes = attributes.map_or(crate::rustc_server::TokenStream::new(), |attr| {
            TokenStream::with_subtree(attr.clone())
        });

        for lib in &self.libs {
            for proc_macro in &lib.exported_macros {
                match proc_macro {
                    bridge::client::ProcMacro::CustomDerive { trait_name, client, .. }
                        if *trait_name == macro_name =>
                    {
                        let res = client.run(
                            &crate::proc_macro::bridge::server::SameThread,
                            crate::rustc_server::Rustc::default(),
                            parsed_body,
                        );
                        return res.map(|it| it.subtree);
                    }
                    bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
                        let res = client.run(
                            &crate::proc_macro::bridge::server::SameThread,
                            crate::rustc_server::Rustc::default(),
                            parsed_body,
                        );
                        return res.map(|it| it.subtree);
                    }
                    bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
                        let res = client.run(
                            &crate::proc_macro::bridge::server::SameThread,
                            crate::rustc_server::Rustc::default(),
                            parsed_attributes,
                            parsed_body,
                        );

                        return res.map(|it| it.subtree);
                    }
                    _ => continue,
                }
            }
        }

        Err(bridge::PanicMessage::String("Nothing to expand".to_string()))
    }

    pub fn list_macros(&self) -> Result<Vec<(String, ProcMacroKind)>, bridge::PanicMessage> {
        let mut result = vec![];

        for lib in &self.libs {
            for proc_macro in &lib.exported_macros {
                let res = match proc_macro {
                    bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
                        (trait_name.to_string(), ProcMacroKind::CustomDerive)
                    }
                    bridge::client::ProcMacro::Bang { name, .. } => {
                        (name.to_string(), ProcMacroKind::FuncLike)
                    }
                    bridge::client::ProcMacro::Attr { name, .. } => {
                        (name.to_string(), ProcMacroKind::Attr)
                    }
                };
                result.push(res);
            }
        }

        Ok(result)
    }
}

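Note: a minimal usage sketch of the new `Expander`, mirroring how lib.rs drives it below (the dylib path and `macro_body` subtree are hypothetical placeholders):

    let expander = dylib::Expander::new(&"/tmp/libserde_derive.so").expect("cannot load dylib");
    match expander.expand("Serialize", &macro_body, None) {
        Ok(subtree) => { /* expanded code as an ra_tt::Subtree */ }
        Err(msg) => eprintln!("expansion failed: {:?}", msg.as_str()),
    }
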
@@ -17,13 +17,41 @@ mod proc_macro;
 #[doc(hidden)]
 mod rustc_server;

+mod dylib;
+
 use proc_macro::bridge::client::TokenStream;
 use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};

-pub fn expand_task(_task: &ExpansionTask) -> Result<ExpansionResult, String> {
-    unimplemented!()
+pub fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
+    let expander = dylib::Expander::new(&task.lib)
+        .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
+
+    match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) {
+        Ok(expansion) => Ok(ExpansionResult { expansion }),
+        Err(msg) => {
+            let reason = format!(
+                "Cannot perform expansion for {}: error {:?}!",
+                &task.macro_name,
+                msg.as_str()
+            );
+            Err(reason)
+        }
+    }
 }

-pub fn list_macros(_task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
-    unimplemented!()
+pub fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
+    let expander = dylib::Expander::new(&task.lib)
+        .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
+
+    match expander.list_macros() {
+        Ok(macros) => Ok(ListMacrosResult { macros }),
+        Err(msg) => {
+            let reason =
+                format!("Cannot perform expansion for {:?}: error {:?}!", &task.lib, msg.as_str());
+            Err(reason)
+        }
+    }
 }

+#[cfg(test)]
+mod tests;

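Note: both entry points are driven by task structs from `ra_proc_macro`. A hedged construction sketch (field names taken from the accesses above; the construction site itself is hypothetical):

    let task = ExpansionTask {
        lib: dylib_path,                    // PathBuf of the proc-macro dylib
        macro_name: "Serialize".to_string(),
        macro_body: subtree,                // ra_tt::Subtree of the annotated item
        attributes: None,
    };
    let expansion = expand_task(&task);
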
@@ -34,6 +34,10 @@ impl TokenStream {
         TokenStream { subtree: Default::default() }
     }

+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        TokenStream { subtree }
+    }
+
     pub fn is_empty(&self) -> bool {
         self.subtree.token_trees.is_empty()
     }

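Note: `with_subtree` is the seam between `ra_tt::Subtree` and the bridge-side `TokenStream`; `Expander::expand` uses it for both the macro body and the optional attribute input. Sketch:

    // Wrap an existing subtree without re-parsing any text.
    let stream = TokenStream::with_subtree(subtree.clone());
    assert_eq!(stream.is_empty(), subtree.token_trees.is_empty());
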
188 crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt vendored Normal file
@@ -0,0 +1,188 @@
SUBTREE $
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT allow 4294967295
SUBTREE () 4294967295
IDENT non_upper_case_globals 4294967295
PUNCH , [alone] 4294967295
IDENT unused_attributes 4294967295
PUNCH , [alone] 4294967295
IDENT unused_qualifications 4294967295
IDENT const 4294967295
IDENT _IMPL_SERIALIZE_FOR_Foo 4294967295
PUNCH : [alone] 4294967295
SUBTREE () 4294967295
PUNCH = [alone] 4294967295
SUBTREE {} 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT allow 4294967295
SUBTREE () 4294967295
IDENT unknown_lints 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT cfg_attr 4294967295
SUBTREE () 4294967295
IDENT feature 4294967295
PUNCH = [alone] 4294967295
SUBTREE $
LITERAL "cargo-clippy" 0
PUNCH , [alone] 4294967295
IDENT allow 4294967295
SUBTREE () 4294967295
IDENT useless_attribute 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT allow 4294967295
SUBTREE () 4294967295
IDENT rust_2018_idioms 4294967295
IDENT extern 4294967295
IDENT crate 4294967295
IDENT serde 4294967295
IDENT as 4294967295
IDENT _serde 4294967295
PUNCH ; [alone] 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT allow 4294967295
SUBTREE () 4294967295
IDENT unused_macros 4294967295
IDENT macro_rules 4294967295
PUNCH ! [alone] 4294967295
IDENT try 4294967295
SUBTREE {} 4294967295
SUBTREE () 4294967295
PUNCH $ [alone] 4294967295
IDENT __expr 4294967295
PUNCH : [alone] 4294967295
IDENT expr 4294967295
PUNCH = [joint] 4294967295
PUNCH > [alone] 4294967295
SUBTREE {} 4294967295
IDENT match 4294967295
PUNCH $ [alone] 4294967295
IDENT __expr 4294967295
SUBTREE {} 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT export 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Ok 4294967295
SUBTREE () 4294967295
IDENT __val 4294967295
PUNCH = [joint] 4294967295
PUNCH > [alone] 4294967295
IDENT __val 4294967295
PUNCH , [alone] 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT export 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Err 4294967295
SUBTREE () 4294967295
IDENT __err 4294967295
PUNCH = [joint] 4294967295
PUNCH > [alone] 4294967295
SUBTREE {} 4294967295
IDENT return 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT export 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Err 4294967295
SUBTREE () 4294967295
IDENT __err 4294967295
PUNCH ; [alone] 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT automatically_derived 4294967295
IDENT impl 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Serialize 4294967295
IDENT for 4294967295
IDENT Foo 1
SUBTREE {} 4294967295
IDENT fn 4294967295
IDENT serialize 4294967295
PUNCH < [alone] 4294967295
IDENT __S 4294967295
PUNCH > [alone] 4294967295
SUBTREE () 4294967295
PUNCH & [alone] 4294967295
IDENT self 4294967295
PUNCH , [alone] 4294967295
IDENT __serializer 4294967295
PUNCH : [alone] 4294967295
IDENT __S 4294967295
PUNCH - [joint] 4294967295
PUNCH > [alone] 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT export 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Result 4294967295
PUNCH < [alone] 4294967295
IDENT __S 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Ok 4294967295
PUNCH , [alone] 4294967295
IDENT __S 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Error 4294967295
PUNCH > [alone] 4294967295
IDENT where 4294967295
IDENT __S 4294967295
PUNCH : [alone] 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Serializer 4294967295
PUNCH , [alone] 4294967295
SUBTREE {} 4294967295
IDENT let 4294967295
IDENT __serde_state 4294967295
PUNCH = [alone] 4294967295
IDENT try 4294967295
PUNCH ! [alone] 4294967295
SUBTREE () 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT Serializer 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT serialize_struct 4294967295
SUBTREE () 4294967295
IDENT __serializer 4294967295
PUNCH , [alone] 4294967295
LITERAL "Foo" 4294967295
PUNCH , [alone] 4294967295
IDENT false 4294967295
IDENT as 4294967295
IDENT usize 4294967295
PUNCH ; [alone] 4294967295
IDENT _serde 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT ser 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT SerializeStruct 4294967295
PUNCH : [joint] 4294967295
PUNCH : [alone] 4294967295
IDENT end 4294967295
SUBTREE () 4294967295
IDENT __serde_state 4294967295
PUNCH ; [alone] 4294967295
47 crates/ra_proc_macro_srv/src/tests/mod.rs Normal file
@@ -0,0 +1,47 @@
//! proc-macro tests

#[macro_use]
mod utils;
use test_utils::assert_eq_text;
use utils::*;

#[test]
fn test_derive_serialize_proc_macro() {
    assert_expand(
        "serde_derive",
        "Serialize",
        "1.0.104",
        r##"struct Foo {}"##,
        include_str!("fixtures/test_serialize_proc_macro.txt"),
    );
}

#[test]
fn test_derive_serialize_proc_macro_failed() {
    assert_expand(
        "serde_derive",
        "Serialize",
        "1.0.104",
        r##"
    struct {}
"##,
        r##"
SUBTREE $
  IDENT   compile_error 4294967295
  PUNCH   ! [alone] 4294967295
  SUBTREE {} 4294967295
    LITERAL "expected identifier" 4294967295
"##,
    );
}

#[test]
fn test_derive_proc_macro_list() {
    let res = list("serde_derive", "1.0.104").join("\n");

    assert_eq_text!(
        &res,
        r#"Serialize [CustomDerive]
Deserialize [CustomDerive]"#
    );
}
65 crates/ra_proc_macro_srv/src/tests/utils.rs Normal file
@@ -0,0 +1,65 @@
//! utils used in proc-macro tests

use crate::dylib;
use crate::list_macros;
pub use difference::Changeset as __Changeset;
use ra_proc_macro::ListMacrosTask;
use std::str::FromStr;
use test_utils::assert_eq_text;

mod fixtures {
    use cargo_metadata::{parse_messages, Message};
    use std::process::Command;

    // Use current project metadata to get the proc-macro dylib path
    pub fn dylib_path(crate_name: &str, version: &str) -> std::path::PathBuf {
        let command = Command::new("cargo")
            .args(&["check", "--message-format", "json"])
            .output()
            .unwrap()
            .stdout;

        for message in parse_messages(command.as_slice()) {
            match message.unwrap() {
                Message::CompilerArtifact(artifact) => {
                    if artifact.target.kind.contains(&"proc-macro".to_string()) {
                        let repr = format!("{} {}", crate_name, version);
                        if artifact.package_id.repr.starts_with(&repr) {
                            return artifact.filenames[0].clone();
                        }
                    }
                }
                _ => (), // Unknown message
            }
        }

        panic!("No proc-macro dylib for {} found!", crate_name);
    }
}

fn parse_string(code: &str) -> Option<crate::rustc_server::TokenStream> {
    Some(crate::rustc_server::TokenStream::from_str(code).unwrap())
}

pub fn assert_expand(
    crate_name: &str,
    macro_name: &str,
    version: &str,
    fixture: &str,
    expect: &str,
) {
    let path = fixtures::dylib_path(crate_name, version);
    let expander = dylib::Expander::new(&path).unwrap();
    let fixture = parse_string(fixture).unwrap();

    let res = expander.expand(macro_name, &fixture.subtree, None).unwrap();
    assert_eq_text!(&format!("{:?}", res), &expect.trim());
}

pub fn list(crate_name: &str, version: &str) -> Vec<String> {
    let path = fixtures::dylib_path(crate_name, version);
    let task = ListMacrosTask { lib: path };

    let res = list_macros(&task).unwrap();
    res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
}

@@ -113,21 +113,6 @@ pub fn profile(label: Label) -> Profiler {
     })
 }

-pub fn print_time(label: Label) -> impl Drop {
-    struct Guard {
-        label: Label,
-        start: Instant,
-    }
-
-    impl Drop for Guard {
-        fn drop(&mut self) {
-            eprintln!("{}: {:?}", self.label, self.start.elapsed())
-        }
-    }
-
-    Guard { label, start: Instant::now() }
-}
-
 pub struct Profiler {
     label: Option<Label>,
     detail: Option<String>,

@@ -42,11 +42,6 @@ pub trait AstNode {
     fn syntax(&self) -> &SyntaxNode;
 }

-#[test]
-fn assert_ast_is_object_safe() {
-    fn _f(_: &dyn AstNode, _: &dyn NameOwner) {}
-}
-
 /// Like `AstNode`, but wraps tokens rather than interior nodes.
 pub trait AstToken {
     fn can_cast(token: SyntaxKind) -> bool

@@ -64,22 +59,6 @@ pub trait AstToken {
     }
 }

-mod support {
-    use super::{AstChildren, AstNode, AstToken, SyntaxNode};
-
-    pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
-        parent.children().find_map(N::cast)
-    }
-
-    pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
-        AstChildren::new(parent)
-    }
-
-    pub(super) fn token<T: AstToken>(parent: &SyntaxNode) -> Option<T> {
-        parent.children_with_tokens().filter_map(|it| it.into_token()).find_map(T::cast)
-    }
-}
-
 /// An iterator over `SyntaxNode` children of a particular AST type.
 #[derive(Debug, Clone)]
 pub struct AstChildren<N> {

@@ -100,12 +79,25 @@ impl<N: AstNode> Iterator for AstChildren<N> {
     }
 }

-fn child_opt<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> Option<C> {
-    children(parent).next()
+mod support {
+    use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
+
+    pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
+        parent.children().find_map(N::cast)
+    }
+
+    pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
+        AstChildren::new(parent)
+    }
+
+    pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+        parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+    }
 }

-fn children<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> AstChildren<C> {
-    AstChildren::new(parent.syntax())
+#[test]
+fn assert_ast_is_object_safe() {
+    fn _f(_: &dyn AstNode, _: &dyn NameOwner) {}
 }

 #[test]

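Note: with the reworked `support` module, token accessors are looked up by `SyntaxKind` rather than via per-token AST types, which is what the generated `*_token()` methods elsewhere in this commit rely on. A sketch of use inside the `ast` module (the helpers are `pub(super)`; `node` with a trailing semicolon is assumed):

    // Find the `;` child token of a node, if present.
    let semi: Option<SyntaxToken> = support::token(node.syntax(), T![;]);
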
@@ -287,7 +279,7 @@ where
     let pred = predicates.next().unwrap();
     let mut bounds = pred.type_bound_list().unwrap().bounds();

-    assert_eq!("'a", pred.lifetime().unwrap().text());
+    assert_eq!("'a", pred.lifetime_token().unwrap().text());

     assert_bound("'b", bounds.next());
     assert_bound("'c", bounds.next());

@@ -6,7 +6,7 @@ use std::{iter, ops::RangeInclusive};
 use arrayvec::ArrayVec;

 use crate::{
-    algo,
+    algo::{self, neighbor, SyntaxRewriter},
     ast::{
         self,
         make::{self, tokens},

@@ -16,7 +16,6 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken, T,
 };
-use algo::{neighbor, SyntaxRewriter};

 impl ast::BinExpr {
     #[must_use]

@@ -96,10 +95,10 @@ impl ast::ItemList {
                 leading_indent(it.syntax()).unwrap_or_default().to_string(),
                 InsertPosition::After(it.syntax().clone().into()),
             ),
-            None => match self.l_curly() {
+            None => match self.l_curly_token() {
                 Some(it) => (
                     " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
-                    InsertPosition::After(it.syntax().clone().into()),
+                    InsertPosition::After(it.into()),
                 ),
                 None => return self.clone(),
             },

@@ -142,8 +141,8 @@ impl ast::RecordFieldList {

     macro_rules! after_l_curly {
         () => {{
-            let anchor = match self.l_curly() {
-                Some(it) => it.syntax().clone().into(),
+            let anchor = match self.l_curly_token() {
+                Some(it) => it.into(),
                 None => return self.clone(),
             };
             InsertPosition::After(anchor)

@@ -190,15 +189,15 @@ impl ast::RecordFieldList {
 impl ast::TypeParam {
     #[must_use]
     pub fn remove_bounds(&self) -> ast::TypeParam {
-        let colon = match self.colon() {
+        let colon = match self.colon_token() {
             Some(it) => it,
             None => return self.clone(),
         };
         let end = match self.type_bound_list() {
             Some(it) => it.syntax().clone().into(),
-            None => colon.syntax().clone().into(),
+            None => colon.clone().into(),
         };
-        self.replace_children(colon.syntax().clone().into()..=end, iter::empty())
+        self.replace_children(colon.into()..=end, iter::empty())
     }
 }

@@ -301,7 +300,7 @@ impl ast::UseTree {
             suffix.clone(),
             self.use_tree_list(),
             self.alias(),
-            self.star().is_some(),
+            self.star_token().is_some(),
         );
         let nested = make::use_tree_list(iter::once(use_tree));
         return make::use_tree(prefix.clone(), Some(nested), None, false);

@@ -1,7 +1,7 @@
 //! Various extension methods to ast Expr Nodes, which are hard to code-generate.

 use crate::{
-    ast::{self, child_opt, children, AstChildren, AstNode},
+    ast::{self, support, AstChildren, AstNode},
     SmolStr,
     SyntaxKind::*,
     SyntaxToken, T,

@@ -36,7 +36,7 @@ impl ast::IfExpr {
         let res = match self.blocks().nth(1) {
             Some(block) => ElseBranch::Block(block),
             None => {
-                let elif: ast::IfExpr = child_opt(self)?;
+                let elif: ast::IfExpr = support::child(self.syntax())?;
                 ElseBranch::IfExpr(elif)
             }
         };

@@ -44,17 +44,7 @@ impl ast::IfExpr {
     }

     fn blocks(&self) -> AstChildren<ast::BlockExpr> {
-        children(self)
-    }
-}
-
-impl ast::RefExpr {
-    pub fn is_mut(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
-    }
-
-    pub fn raw_token(&self) -> Option<SyntaxToken> {
-        None // FIXME: implement &raw
+        support::children(self.syntax())
     }
 }

@@ -212,15 +202,15 @@ impl ast::BinExpr {
     }

     pub fn lhs(&self) -> Option<ast::Expr> {
-        children(self).next()
+        support::children(self.syntax()).next()
     }

     pub fn rhs(&self) -> Option<ast::Expr> {
-        children(self).nth(1)
+        support::children(self.syntax()).nth(1)
     }

     pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
-        let mut children = children(self);
+        let mut children = support::children(self.syntax());
         let first = children.next();
         let second = children.next();
         (first, second)

@@ -275,10 +265,10 @@ impl ast::RangeExpr {

 impl ast::IndexExpr {
     pub fn base(&self) -> Option<ast::Expr> {
-        children(self).next()
+        support::children(self.syntax()).next()
     }
     pub fn index(&self) -> Option<ast::Expr> {
-        children(self).nth(1)
+        support::children(self.syntax()).nth(1)
     }
 }

@@ -291,11 +281,11 @@ impl ast::ArrayExpr {
     pub fn kind(&self) -> ArrayExprKind {
         if self.is_repeat() {
             ArrayExprKind::Repeat {
-                initializer: children(self).next(),
-                repeat: children(self).nth(1),
+                initializer: support::children(self.syntax()).next(),
+                repeat: support::children(self.syntax()).nth(1),
             }
         } else {
-            ArrayExprKind::ElementList(children(self))
+            ArrayExprKind::ElementList(support::children(self.syntax()))
         }
     }

@@ -2,16 +2,12 @@
 //! Extensions for various expressions live in a sibling `expr_extensions` module.

 use itertools::Itertools;
-use ra_parser::SyntaxKind;

 use crate::{
-    ast::{
-        self, child_opt, children, support, AstNode, AstToken, AttrInput, NameOwner, SyntaxNode,
-    },
-    SmolStr, SyntaxElement,
-    SyntaxKind::*,
-    SyntaxToken, T,
+    ast::{self, support, AstNode, AttrInput, NameOwner, SyntaxNode},
+    SmolStr, SyntaxElement, SyntaxToken, T,
 };
+use ra_parser::SyntaxKind;

 impl ast::Name {
     pub fn text(&self) -> &SmolStr {

@@ -25,13 +21,7 @@ impl ast::NameRef {
     }

     pub fn as_tuple_field(&self) -> Option<usize> {
-        self.syntax().children_with_tokens().find_map(|c| {
-            if c.kind() == SyntaxKind::INT_NUMBER {
-                c.as_token().and_then(|tok| tok.text().as_str().parse().ok())
-            } else {
-                None
-            }
-        })
+        self.text().parse().ok()
     }
 }

@@ -87,7 +77,7 @@ impl ast::Attr {
             first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);

         match (first_token_kind, second_token_kind) {
-            (Some(SyntaxKind::POUND), Some(SyntaxKind::EXCL)) => AttrKind::Inner,
+            (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner,
             _ => AttrKind::Outer,
         }
     }

@@ -140,15 +130,6 @@ impl ast::Path {
     }
 }

-impl ast::Module {
-    pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child_or_token() {
-            None => false,
-            Some(node) => node.kind() == T![;],
-        }
-    }
-}
-
 impl ast::UseTreeList {
     pub fn parent_use_tree(&self) -> ast::UseTree {
         self.syntax()

@@ -174,15 +155,11 @@ impl ast::ImplDef {
     }

     fn target(&self) -> (Option<ast::TypeRef>, Option<ast::TypeRef>) {
-        let mut types = children(self);
+        let mut types = support::children(self.syntax());
         let first = types.next();
         let second = types.next();
         (first, second)
     }
-
-    pub fn is_negative(&self) -> bool {
-        self.syntax().children_with_tokens().any(|t| t.kind() == T![!])
-    }
 }

 #[derive(Debug, Clone, PartialEq, Eq)]

@@ -194,9 +171,9 @@ pub enum StructKind {

 impl StructKind {
     fn from_node<N: AstNode>(node: &N) -> StructKind {
-        if let Some(nfdl) = child_opt::<_, ast::RecordFieldDefList>(node) {
+        if let Some(nfdl) = support::child::<ast::RecordFieldDefList>(node.syntax()) {
             StructKind::Record(nfdl)
-        } else if let Some(pfl) = child_opt::<_, ast::TupleFieldDefList>(node) {
+        } else if let Some(pfl) = support::child::<ast::TupleFieldDefList>(node.syntax()) {
             StructKind::Tuple(pfl)
         } else {
             StructKind::Unit

@@ -210,6 +187,36 @@ impl ast::StructDef {
     }
 }

+impl ast::RecordField {
+    pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordField> {
+        let candidate =
+            field_name.syntax().parent().and_then(ast::RecordField::cast).or_else(|| {
+                field_name.syntax().ancestors().nth(4).and_then(ast::RecordField::cast)
+            })?;
+        if candidate.field_name().as_ref() == Some(field_name) {
+            Some(candidate)
+        } else {
+            None
+        }
+    }
+
+    /// Deals with field init shorthand
+    pub fn field_name(&self) -> Option<ast::NameRef> {
+        if let Some(name_ref) = self.name_ref() {
+            return Some(name_ref);
+        }
+        if let Some(ast::Expr::PathExpr(expr)) = self.expr() {
+            let path = expr.path()?;
+            let segment = path.segment()?;
+            let name_ref = segment.name_ref()?;
+            if path.qualifier().is_none() {
+                return Some(name_ref);
+            }
+        }
+        None
+    }
+}
+
 impl ast::EnumVariant {
     pub fn parent_enum(&self) -> ast::EnumDef {
         self.syntax()

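Note: `field_name` handles record field init shorthand, so completion and navigation can resolve `Foo { bar }` the same way as `Foo { bar: bar }`. The two shapes it covers:

    // Explicit form:  Foo { bar: value } -> name_ref() is Some(`bar`)
    // Shorthand form: Foo { bar }        -> no name_ref; the field name is the
    //                                       single, unqualified path segment of expr()
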
@@ -223,41 +230,6 @@ impl ast::EnumVariant {
     }
 }

-impl ast::FnDef {
-    pub fn semicolon_token(&self) -> Option<SyntaxToken> {
-        self.syntax()
-            .last_child_or_token()
-            .and_then(|it| it.into_token())
-            .filter(|it| it.kind() == T![;])
-    }
-
-    pub fn is_async(&self) -> bool {
-        self.syntax().children_with_tokens().any(|it| it.kind() == T![async])
-    }
-}
-
-impl ast::LetStmt {
-    pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child_or_token() {
-            None => false,
-            Some(node) => node.kind() == T![;],
-        }
-    }
-
-    pub fn eq_token(&self) -> Option<SyntaxToken> {
-        self.syntax().children_with_tokens().find(|t| t.kind() == EQ).and_then(|it| it.into_token())
-    }
-}
-
-impl ast::ExprStmt {
-    pub fn has_semi(&self) -> bool {
-        match self.syntax().last_child_or_token() {
-            None => false,
-            Some(node) => node.kind() == T![;],
-        }
-    }
-}
-
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum FieldKind {
     Name(ast::NameRef),

@@ -286,25 +258,6 @@ impl ast::FieldExpr {
     }
 }

-impl ast::RefPat {
-    pub fn is_mut(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
-    }
-}
-
-impl ast::BindPat {
-    pub fn is_mutable(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
-    }
-
-    pub fn is_ref(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![ref])
-    }
-    pub fn has_at(&self) -> bool {
-        self.syntax().children_with_tokens().any(|it| it.kind() == T![@])
-    }
-}
-
 pub struct SlicePatComponents {
     pub prefix: Vec<ast::Pat>,
     pub slice: Option<ast::Pat>,

@@ -339,18 +292,6 @@ impl ast::SlicePat {
     }
 }

-impl ast::PointerType {
-    pub fn is_mut(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
-    }
-}
-
-impl ast::ReferenceType {
-    pub fn is_mut(&self) -> bool {
-        self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
-    }
-}
-
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub enum SelfParamKind {
     /// self

@@ -363,8 +304,8 @@ pub enum SelfParamKind {

 impl ast::SelfParam {
     pub fn kind(&self) -> SelfParamKind {
-        if self.amp().is_some() {
-            if self.amp_mut_kw().is_some() {
+        if self.amp_token().is_some() {
+            if self.mut_token().is_some() {
                 SelfParamKind::MutRef
             } else {
                 SelfParamKind::Ref

@@ -373,24 +314,6 @@ impl ast::SelfParam {
             SelfParamKind::Owned
         }
     }
-
-    /// the "mut" in "mut self", not the one in "&mut self"
-    pub fn mut_kw(&self) -> Option<ast::MutKw> {
-        self.syntax()
-            .children_with_tokens()
-            .filter_map(|it| it.into_token())
-            .take_while(|it| it.kind() != T![&])
-            .find_map(ast::MutKw::cast)
-    }
-
-    /// the "mut" in "&mut self", not the one in "mut self"
-    pub fn amp_mut_kw(&self) -> Option<ast::MutKw> {
-        self.syntax()
-            .children_with_tokens()
-            .filter_map(|it| it.into_token())
-            .skip_while(|it| it.kind() != T![&])
-            .find_map(ast::MutKw::cast)
-    }
 }

 #[derive(Clone, Debug, PartialEq, Eq, Hash)]

@@ -400,53 +323,43 @@ pub enum TypeBoundKind {
     /// for<'a> ...
     ForType(ast::ForType),
     /// 'a
-    Lifetime(ast::Lifetime),
+    Lifetime(SyntaxToken),
 }

 impl ast::TypeBound {
     pub fn kind(&self) -> TypeBoundKind {
-        if let Some(path_type) = children(self).next() {
+        if let Some(path_type) = support::children(self.syntax()).next() {
             TypeBoundKind::PathType(path_type)
-        } else if let Some(for_type) = children(self).next() {
+        } else if let Some(for_type) = support::children(self.syntax()).next() {
             TypeBoundKind::ForType(for_type)
-        } else if let Some(lifetime) = self.lifetime() {
+        } else if let Some(lifetime) = self.lifetime_token() {
             TypeBoundKind::Lifetime(lifetime)
         } else {
             unreachable!()
         }
     }

-    pub fn has_question_mark(&self) -> bool {
-        self.question().is_some()
-    }
-
-    pub fn const_question(&self) -> Option<ast::Question> {
+    pub fn const_question_token(&self) -> Option<SyntaxToken> {
         self.syntax()
             .children_with_tokens()
             .filter_map(|it| it.into_token())
             .take_while(|it| it.kind() != T![const])
-            .find_map(ast::Question::cast)
+            .find(|it| it.kind() == T![?])
     }

-    pub fn question(&self) -> Option<ast::Question> {
-        if self.const_kw().is_some() {
+    pub fn question_token(&self) -> Option<SyntaxToken> {
+        if self.const_token().is_some() {
             self.syntax()
                 .children_with_tokens()
                 .filter_map(|it| it.into_token())
                 .skip_while(|it| it.kind() != T![const])
-                .find_map(ast::Question::cast)
+                .find(|it| it.kind() == T![?])
         } else {
-            support::token(&self.syntax)
+            support::token(&self.syntax, T![?])
         }
     }
 }

-impl ast::TraitDef {
-    pub fn is_auto(&self) -> bool {
-        self.syntax().children_with_tokens().any(|t| t.kind() == T![auto])
-    }
-}
-
 pub enum VisibilityKind {
     In(ast::Path),
     PubCrate,

@@ -457,30 +370,18 @@ pub enum VisibilityKind {

 impl ast::Visibility {
     pub fn kind(&self) -> VisibilityKind {
-        if let Some(path) = children(self).next() {
+        if let Some(path) = support::children(self.syntax()).next() {
             VisibilityKind::In(path)
-        } else if self.is_pub_crate() {
+        } else if self.crate_token().is_some() {
             VisibilityKind::PubCrate
-        } else if self.is_pub_super() {
+        } else if self.super_token().is_some() {
             VisibilityKind::PubSuper
-        } else if self.is_pub_self() {
+        } else if self.self_token().is_some() {
             VisibilityKind::PubSuper
         } else {
             VisibilityKind::Pub
         }
     }
-
-    fn is_pub_crate(&self) -> bool {
-        self.syntax().children_with_tokens().any(|it| it.kind() == T![crate])
-    }
-
-    fn is_pub_super(&self) -> bool {
-        self.syntax().children_with_tokens().any(|it| it.kind() == T![super])
-    }
-
-    fn is_pub_self(&self) -> bool {
-        self.syntax().children_with_tokens().any(|it| it.kind() == T![self])
-    }
 }

 impl ast::MacroCall {

@@ -495,12 +396,12 @@ impl ast::MacroCall {
 }

 impl ast::LifetimeParam {
-    pub fn lifetime_bounds(&self) -> impl Iterator<Item = ast::Lifetime> {
+    pub fn lifetime_bounds(&self) -> impl Iterator<Item = SyntaxToken> {
         self.syntax()
             .children_with_tokens()
             .filter_map(|it| it.into_token())
             .skip_while(|x| x.kind() != T![:])
-            .filter_map(ast::Lifetime::cast)
+            .filter(|it| it.kind() == T![lifetime])
     }
 }

@@ -508,7 +409,7 @@ impl ast::RangePat {
     pub fn start(&self) -> Option<ast::Pat> {
         self.syntax()
             .children_with_tokens()
-            .take_while(|it| !ast::RangeSeparator::can_cast(it.kind()))
+            .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
             .filter_map(|it| it.into_node())
             .find_map(ast::Pat::cast)
     }

@@ -516,18 +417,24 @@ impl ast::RangePat {
     pub fn end(&self) -> Option<ast::Pat> {
         self.syntax()
             .children_with_tokens()
-            .skip_while(|it| !ast::RangeSeparator::can_cast(it.kind()))
+            .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
             .filter_map(|it| it.into_node())
             .find_map(ast::Pat::cast)
     }
 }

 impl ast::TokenTree {
-    pub fn left_delimiter(&self) -> Option<ast::LeftDelimiter> {
-        self.syntax().first_child_or_token()?.into_token().and_then(ast::LeftDelimiter::cast)
+    pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
+        self.syntax().first_child_or_token()?.into_token().filter(|it| match it.kind() {
+            T!['{'] | T!['('] | T!['['] => true,
+            _ => false,
+        })
     }

-    pub fn right_delimiter(&self) -> Option<ast::RightDelimiter> {
-        self.syntax().last_child_or_token()?.into_token().and_then(ast::RightDelimiter::cast)
+    pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
+        self.syntax().last_child_or_token()?.into_token().filter(|it| match it.kind() {
+            T!['{'] | T!['('] | T!['['] => true,
+            _ => false,
+        })
     }
 }

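Note: the delimiter accessors now hand back plain `SyntaxToken`s filtered by kind instead of dedicated AST token types. A hedged usage sketch (a token tree `tt` is assumed):

    if let Some(l) = tt.left_delimiter_token() {
        // l.kind() is one of T!['('], T!['['], T!['{']
        assert!(matches!(l.kind(), T!['('] | T!['['] | T!['{']));
    }
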
File diff suppressed because it is too large
@@ -100,6 +100,9 @@ pub fn expr_empty_block() -> ast::Expr {
 pub fn expr_unimplemented() -> ast::Expr {
     expr_from_text("unimplemented!()")
 }
+pub fn expr_todo() -> ast::Expr {
+    expr_from_text("todo!()")
+}
 pub fn expr_path(path: ast::Path) -> ast::Expr {
     expr_from_text(&path.to_string())
 }

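Note: `expr_todo` mirrors `expr_unimplemented` for assists that prefer `todo!()` placeholder bodies. Sketch:

    let body = make::expr_todo(); // an ast::Expr for `todo!()`
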
@@ -1,83 +1,77 @@
 //! Various traits that are implemented by ast nodes.
 //!
 //! The implementations are usually trivial, and live in generated.rs

-use itertools::Itertools;
+use stdx::SepBy;

 use crate::{
-    ast::{self, child_opt, children, support, AstChildren, AstNode, AstToken},
+    ast::{self, support, AstChildren, AstNode, AstToken},
     syntax_node::SyntaxElementChildren,
     SyntaxToken, T,
 };

 pub trait TypeAscriptionOwner: AstNode {
     fn ascribed_type(&self) -> Option<ast::TypeRef> {
-        child_opt(self)
+        support::child(self.syntax())
     }
 }

 pub trait NameOwner: AstNode {
     fn name(&self) -> Option<ast::Name> {
-        child_opt(self)
+        support::child(self.syntax())
     }
 }

 pub trait VisibilityOwner: AstNode {
     fn visibility(&self) -> Option<ast::Visibility> {
-        child_opt(self)
+        support::child(self.syntax())
     }
 }

 pub trait LoopBodyOwner: AstNode {
     fn loop_body(&self) -> Option<ast::BlockExpr> {
-        child_opt(self)
+        support::child(self.syntax())
     }

     fn label(&self) -> Option<ast::Label> {
-        child_opt(self)
+        support::child(self.syntax())
     }
 }

 pub trait ArgListOwner: AstNode {
     fn arg_list(&self) -> Option<ast::ArgList> {
-        child_opt(self)
-    }
-}
-
-pub trait FnDefOwner: AstNode {
-    fn functions(&self) -> AstChildren<ast::FnDef> {
-        children(self)
+        support::child(self.syntax())
     }
 }

 pub trait ModuleItemOwner: AstNode {
     fn items(&self) -> AstChildren<ast::ModuleItem> {
-        children(self)
+        support::children(self.syntax())
     }
 }

 pub trait TypeParamsOwner: AstNode {
     fn type_param_list(&self) -> Option<ast::TypeParamList> {
-        child_opt(self)
+        support::child(self.syntax())
     }

     fn where_clause(&self) -> Option<ast::WhereClause> {
-        child_opt(self)
+        support::child(self.syntax())
     }
 }

 pub trait TypeBoundsOwner: AstNode {
     fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
-        child_opt(self)
+        support::child(self.syntax())
     }

-    fn colon(&self) -> Option<ast::Colon> {
-        support::token(self.syntax())
+    fn colon_token(&self) -> Option<SyntaxToken> {
+        support::token(self.syntax(), T![:])
     }
 }

 pub trait AttrsOwner: AstNode {
     fn attrs(&self) -> AstChildren<ast::Attr> {
-        children(self)
+        support::children(self.syntax())
     }
     fn has_atom_attr(&self, atom: &str) -> bool {
         self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)

@@ -122,7 +116,8 @@ pub trait DocCommentsOwner: AstNode {
                 // of a line in markdown.
                 line[pos..end].to_owned()
             })
-            .join("\n");
+            .sep_by("\n")
+            .to_string();

         if has_comments {
             Some(docs)

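Note: `sep_by` comes from the workspace's `stdx` crate and appears to return a `Display` adapter rather than a `String`, hence the added `.to_string()`; for this call site it behaves like itertools' `join`:

    // itertools: iter.join("\n")              -> String
    // stdx:      iter.sep_by("\n").to_string() -> String
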
@@ -19,6 +19,11 @@
 //! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
 //! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>

+#[allow(unused)]
+macro_rules! eprintln {
+    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
 mod syntax_node;
 mod syntax_error;
 mod parsing;

@@ -4,7 +4,7 @@
 use crate::{
     SyntaxError,
     SyntaxKind::{self, *},
-    TextRange, TextUnit,
+    TextRange, TextUnit, T,
 };

 /// A token of Rust source.

@@ -115,21 +115,20 @@ fn rustc_token_kind_to_syntax_kind(
     // being `u16` that come from `rowan::SyntaxKind`.

     let syntax_kind = {
-        use rustc_lexer::TokenKind as TK;
         match rustc_token_kind {
-            TK::LineComment => COMMENT,
+            rustc_lexer::TokenKind::LineComment => COMMENT,

-            TK::BlockComment { terminated: true } => COMMENT,
-            TK::BlockComment { terminated: false } => {
+            rustc_lexer::TokenKind::BlockComment { terminated: true } => COMMENT,
+            rustc_lexer::TokenKind::BlockComment { terminated: false } => {
                 return (
                     COMMENT,
                     Some("Missing trailing `*/` symbols to terminate the block comment"),
                 );
             }

-            TK::Whitespace => WHITESPACE,
+            rustc_lexer::TokenKind::Whitespace => WHITESPACE,

-            TK::Ident => {
+            rustc_lexer::TokenKind::Ident => {
                 if token_text == "_" {
                     UNDERSCORE
                 } else {

@@ -137,42 +136,42 @@ fn rustc_token_kind_to_syntax_kind(
                 }
             }

-            TK::RawIdent => IDENT,
-            TK::Literal { kind, .. } => return match_literal_kind(&kind),
+            rustc_lexer::TokenKind::RawIdent => IDENT,
+            rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),

-            TK::Lifetime { starts_with_number: false } => LIFETIME,
-            TK::Lifetime { starts_with_number: true } => {
+            rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME,
+            rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
                 return (LIFETIME, Some("Lifetime name cannot start with a number"))
             }

-            TK::Semi => SEMI,
-            TK::Comma => COMMA,
-            TK::Dot => DOT,
-            TK::OpenParen => L_PAREN,
-            TK::CloseParen => R_PAREN,
-            TK::OpenBrace => L_CURLY,
-            TK::CloseBrace => R_CURLY,
-            TK::OpenBracket => L_BRACK,
-            TK::CloseBracket => R_BRACK,
-            TK::At => AT,
-            TK::Pound => POUND,
-            TK::Tilde => TILDE,
-            TK::Question => QUESTION,
-            TK::Colon => COLON,
-            TK::Dollar => DOLLAR,
-            TK::Eq => EQ,
-            TK::Not => EXCL,
-            TK::Lt => L_ANGLE,
-            TK::Gt => R_ANGLE,
-            TK::Minus => MINUS,
-            TK::And => AMP,
-            TK::Or => PIPE,
-            TK::Plus => PLUS,
-            TK::Star => STAR,
-            TK::Slash => SLASH,
-            TK::Caret => CARET,
-            TK::Percent => PERCENT,
-            TK::Unknown => ERROR,
+            rustc_lexer::TokenKind::Semi => T![;],
+            rustc_lexer::TokenKind::Comma => T![,],
+            rustc_lexer::TokenKind::Dot => T![.],
+            rustc_lexer::TokenKind::OpenParen => T!['('],
+            rustc_lexer::TokenKind::CloseParen => T![')'],
+            rustc_lexer::TokenKind::OpenBrace => T!['{'],
+            rustc_lexer::TokenKind::CloseBrace => T!['}'],
+            rustc_lexer::TokenKind::OpenBracket => T!['['],
+            rustc_lexer::TokenKind::CloseBracket => T![']'],
+            rustc_lexer::TokenKind::At => T![@],
+            rustc_lexer::TokenKind::Pound => T![#],
+            rustc_lexer::TokenKind::Tilde => T![~],
+            rustc_lexer::TokenKind::Question => T![?],
+            rustc_lexer::TokenKind::Colon => T![:],
+            rustc_lexer::TokenKind::Dollar => T![$],
+            rustc_lexer::TokenKind::Eq => T![=],
+            rustc_lexer::TokenKind::Not => T![!],
+            rustc_lexer::TokenKind::Lt => T![<],
+            rustc_lexer::TokenKind::Gt => T![>],
+            rustc_lexer::TokenKind::Minus => T![-],
+            rustc_lexer::TokenKind::And => T![&],
+            rustc_lexer::TokenKind::Or => T![|],
+            rustc_lexer::TokenKind::Plus => T![+],
+            rustc_lexer::TokenKind::Star => T![*],
+            rustc_lexer::TokenKind::Slash => T![/],
+            rustc_lexer::TokenKind::Caret => T![^],
+            rustc_lexer::TokenKind::Percent => T![%],
+            rustc_lexer::TokenKind::Unknown => ERROR,
         }
     };
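
The `T![...]` shorthand introduced above maps a token's surface syntax to its `SyntaxKind` variant, so match arms read like the tokens they produce. A trimmed sketch of such a macro (the repository's real one is generated and covers every token and keyword, so this is illustrative only):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    #[allow(non_camel_case_types, dead_code)]
    enum SyntaxKind { SEMICOLON, COMMA, DOT, COLON, BANG, AT, L_PAREN, R_PAREN, L_CURLY, R_CURLY }

    macro_rules! T {
        (;) => { SyntaxKind::SEMICOLON };
        (,) => { SyntaxKind::COMMA };
        (.) => { SyntaxKind::DOT };
        (:) => { SyntaxKind::COLON };
        (!) => { SyntaxKind::BANG };
        (@) => { SyntaxKind::AT };
        ('(') => { SyntaxKind::L_PAREN };
        (')') => { SyntaxKind::R_PAREN };
        ('{') => { SyntaxKind::L_CURLY };
        ('}') => { SyntaxKind::R_CURLY };
    }

    fn main() {
        // Match arms can now name tokens by how they look in source code.
        let kind = T![;];
        assert_eq!(kind, SyntaxKind::SEMICOLON);
    }

Note that delimiters are written as character literals (`T!['(']`) because bare parentheses and braces cannot appear alone in a macro pattern.
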
@@ -10,7 +10,7 @@ use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};

 /// A pointer to a syntax node inside a file. It can be used to remember a
 /// specific node across reparses of the same file.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct SyntaxNodePtr {
     pub(crate) range: TextRange,
     kind: SyntaxKind,

@@ -21,7 +21,7 @@ impl SyntaxNodePtr {
         SyntaxNodePtr { range: node.text_range(), kind: node.kind() }
     }

-    pub fn to_node(self, root: &SyntaxNode) -> SyntaxNode {
+    pub fn to_node(&self, root: &SyntaxNode) -> SyntaxNode {
         assert!(root.parent().is_none());
         successors(Some(root.clone()), |node| {
             node.children().find(|it| self.range.is_subrange(&it.text_range()))

@@ -30,11 +30,11 @@ impl SyntaxNodePtr {
             .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
     }

-    pub fn range(self) -> TextRange {
+    pub fn range(&self) -> TextRange {
         self.range
     }

-    pub fn kind(self) -> SyntaxKind {
+    pub fn kind(&self) -> SyntaxKind {
         self.kind
     }
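
`to_node` above resolves a pointer by descending from the root: at each step it moves into the child whose text range contains the pointer's range (`is_subrange`), until it reaches the node with the exact stored range. A self-contained sketch of that descent with simplified stand-in types (the real code additionally checks that the found node has the expected `SyntaxKind`):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct TextRange { start: u32, end: u32 }

    impl TextRange {
        // Mirrors `is_subrange`: does `self` fully contain `other`?
        fn contains(self, other: TextRange) -> bool {
            self.start <= other.start && other.end <= self.end
        }
    }

    struct Node { range: TextRange, children: Vec<Node> }

    // Walk from the root into the child covering the target range,
    // until the exact range is reached.
    fn resolve(root: &Node, target: TextRange) -> Option<&Node> {
        let mut node = root;
        loop {
            if node.range == target {
                return Some(node);
            }
            node = node.children.iter().find(|c| c.range.contains(target))?;
        }
    }

    fn main() {
        let leaf = Node { range: TextRange { start: 4, end: 7 }, children: Vec::new() };
        let root = Node { range: TextRange { start: 0, end: 10 }, children: vec![leaf] };
        assert!(resolve(&root, TextRange { start: 4, end: 7 }).is_some());
    }
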
@@ -53,10 +53,9 @@ pub struct AstPtr<N: AstNode> {
     _ty: PhantomData<fn() -> N>,
 }

-impl<N: AstNode> Copy for AstPtr<N> {}
 impl<N: AstNode> Clone for AstPtr<N> {
     fn clone(&self) -> AstPtr<N> {
-        *self
+        AstPtr { raw: self.raw.clone(), _ty: PhantomData }
     }
 }

@@ -79,13 +78,13 @@ impl<N: AstNode> AstPtr<N> {
         AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
     }

-    pub fn to_node(self, root: &SyntaxNode) -> N {
+    pub fn to_node(&self, root: &SyntaxNode) -> N {
         let syntax_node = self.raw.to_node(root);
         N::cast(syntax_node).unwrap()
     }

-    pub fn syntax_node_ptr(self) -> SyntaxNodePtr {
-        self.raw
+    pub fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+        self.raw.clone()
     }

     pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
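
Two things happen in the `AstPtr` hunks: the `Copy` impl goes away (its `SyntaxNodePtr` field lost `Copy` in the hunk before, so `*self` would no longer compile), and `Clone` stays hand-written rather than derived because `#[derive(Clone)]` on a generic struct would impose an unnecessary `N: Clone` bound even though `N` only appears inside `PhantomData`. A self-contained illustration of that pattern with stand-in types:

    use std::marker::PhantomData;

    #[derive(Clone)]
    struct Raw(Vec<u8>); // stand-in for SyntaxNodePtr: Clone, but no longer Copy

    struct Ptr<N> {
        raw: Raw,
        _ty: PhantomData<fn() -> N>,
    }

    // Hand-written impl: only the fields must be cloneable; `N` is unconstrained.
    impl<N> Clone for Ptr<N> {
        fn clone(&self) -> Ptr<N> {
            Ptr { raw: self.raw.clone(), _ty: PhantomData }
        }
    }

    struct NotClone; // even `Ptr<NotClone>` can be cloned

    fn main() {
        let p: Ptr<NotClone> = Ptr { raw: Raw(vec![1, 2, 3]), _ty: PhantomData };
        let _q = p.clone();
    }
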
@@ -6,5 +6,5 @@ WHITESPACE 1 " "
 IDENT 1 "a"
 WHITESPACE 1 " "
 IDENT 5 "quote"
-EXCL 1 "!"
+BANG 1 "!"
 > error[0; 4) token("r## ") msg(Missing `"` symbol after `#` symbols to begin the raw string literal)

@@ -6,5 +6,5 @@ WHITESPACE 1 " "
 IDENT 1 "a"
 WHITESPACE 1 " "
 IDENT 5 "quote"
-EXCL 1 "!"
+BANG 1 "!"
 > error[0; 5) token("br## ") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal)

@@ -1,4 +1,4 @@
-SEMI 1 ";"
+SEMICOLON 1 ";"
 WHITESPACE 1 " "
 COMMA 1 ","
 WHITESPACE 1 " "
|
@ -65,9 +65,9 @@ WHITESPACE 1 " "
|
|||
EQ 1 "="
|
||||
R_ANGLE 1 ">"
|
||||
WHITESPACE 1 "\n"
|
||||
EXCL 1 "!"
|
||||
BANG 1 "!"
|
||||
WHITESPACE 1 " "
|
||||
EXCL 1 "!"
|
||||
BANG 1 "!"
|
||||
EQ 1 "="
|
||||
WHITESPACE 1 "\n"
|
||||
MINUS 1 "-"
|
||||
|
|
|
@ -3,7 +3,7 @@ SOURCE_FILE@[0; 42)
|
|||
WHITESPACE@[20; 21) "\n"
|
||||
ATTR@[21; 23)
|
||||
POUND@[21; 22) "#"
|
||||
EXCL@[22; 23) "!"
|
||||
BANG@[22; 23) "!"
|
||||
ERROR@[23; 24)
|
||||
SLASH@[23; 24) "/"
|
||||
USE_ITEM@[24; 28)
|
||||
|
@ -29,11 +29,11 @@ SOURCE_FILE@[0; 42)
|
|||
error [23; 23): expected `[`
|
||||
error [23; 23): expected an item
|
||||
error [27; 27): expected one of `*`, `::`, `{`, `self`, `super` or an identifier
|
||||
error [28; 28): expected SEMI
|
||||
error [31; 31): expected EXCL
|
||||
error [28; 28): expected SEMICOLON
|
||||
error [31; 31): expected BANG
|
||||
error [31; 31): expected `{`, `[`, `(`
|
||||
error [31; 31): expected SEMI
|
||||
error [31; 31): expected SEMICOLON
|
||||
error [31; 31): expected an item
|
||||
error [35; 35): expected EXCL
|
||||
error [35; 35): expected BANG
|
||||
error [41; 41): expected `{`, `[`, `(`
|
||||
error [41; 41): expected SEMI
|
||||
error [41; 41): expected SEMICOLON
|
||||
|
|
|
@@ -34,6 +34,6 @@ SOURCE_FILE@[0; 40)
 WHITESPACE@[37; 38) "\n"
 R_CURLY@[38; 39) "}"
 ERROR@[39; 40)
-SEMI@[39; 40) ";"
+SEMICOLON@[39; 40) ";"
 error [39; 39): expected item, found `;`
 consider removing this semicolon

@@ -8,9 +8,9 @@ SOURCE_FILE@[0; 12)
 PATH_SEGMENT@[4; 7)
 NAME_REF@[4; 7)
 IDENT@[4; 7) "foo"
-COLONCOLON@[7; 9) "::"
+COLON2@[7; 9) "::"
 PATH_SEGMENT@[9; 11)
 ERROR@[9; 11)
 INT_NUMBER@[9; 11) "92"
-SEMI@[11; 12) ";"
+SEMICOLON@[11; 12) ";"
 error [9; 9): expected identifier

@@ -7,7 +7,7 @@ SOURCE_FILE@[0; 31)
 WHITESPACE@[9; 10) " "
 NAME@[10; 11)
 IDENT@[10; 11) "S"
-SEMI@[11; 12) ";"
+SEMICOLON@[11; 12) ";"
 WHITESPACE@[12; 14) "\n\n"
 ERROR@[14; 15)
 R_CURLY@[14; 15) "}"
@@ -76,6 +76,6 @@ SOURCE_FILE@[0; 95)
 WHITESPACE@[92; 93) "\n"
 R_CURLY@[93; 94) "}"
 WHITESPACE@[94; 95) "\n"
-error [17; 17): expected EXCL
-error [19; 19): expected SEMI
+error [17; 17): expected BANG
+error [19; 19): expected SEMICOLON
 error [20; 20): expected an item

@@ -43,7 +43,7 @@ SOURCE_FILE@[0; 43)
 WHITESPACE@[39; 40) " "
 NAME@[40; 41)
 IDENT@[40; 41) "T"
-SEMI@[41; 42) ";"
+SEMICOLON@[41; 42) ";"
 WHITESPACE@[42; 43) "\n"
 error [9; 9): expected type parameter
 error [11; 11): expected COMMA

@@ -53,6 +53,6 @@ error [12; 12): expected an item
 error [14; 14): expected an item
 error [15; 15): expected an item
 error [17; 17): expected an item
-error [24; 24): expected SEMI
+error [24; 24): expected SEMICOLON
 error [24; 24): expected expression
-error [25; 25): expected SEMI
+error [25; 25): expected SEMICOLON
@@ -37,9 +37,9 @@ SOURCE_FILE@[0; 42)
 R_PAREN@[35; 36) ")"
 WHITESPACE@[36; 37) " "
 R_CURLY@[37; 38) "}"
-SEMI@[38; 39) ";"
+SEMICOLON@[38; 39) ";"
 WHITESPACE@[39; 40) "\n"
 R_CURLY@[40; 41) "}"
 WHITESPACE@[41; 42) "\n"
 error [24; 24): expected `{`
-error [24; 24): expected SEMI
+error [24; 24): expected SEMICOLON

@@ -8,6 +8,6 @@ SOURCE_FILE@[0; 19)
 WHITESPACE@[13; 14) " "
 NAME@[14; 17)
 IDENT@[14; 17) "Foo"
-SEMI@[17; 18) ";"
+SEMICOLON@[17; 18) ";"
 WHITESPACE@[18; 19) "\n"
 error [6; 6): expected existential, fn, trait or impl

@@ -106,7 +106,7 @@ SOURCE_FILE@[0; 389)
 WHITESPACE@[122; 123)
 LITERAL@[123; 124)
 INT_NUMBER@[123; 124) "0"
-SEMI@[124; 125)
+SEMICOLON@[124; 125)
 WHITESPACE@[125; 130)
 LET_STMT@[130; 389)
 LET_KW@[130; 133)
@@ -226,7 +226,7 @@ SOURCE_FILE@[0; 389)
 err: `expected pattern`
 PARAM@[236; 237)
 ERROR@[236; 237)
-EXCL@[236; 237)
+BANG@[236; 237)
 err: `expected COMMA`
 PARAM@[237; 242)
 BIND_PAT@[237; 242)

@@ -272,7 +272,7 @@ SOURCE_FILE@[0; 389)
 err: `expected pattern`
 PARAM@[283; 284)
 ERROR@[283; 284)
-SEMI@[283; 284)
+SEMICOLON@[283; 284)
 err: `expected COMMA`
 WHITESPACE@[284; 297)
 err: `expected pattern`

@@ -327,7 +327,7 @@ SOURCE_FILE@[0; 389)
 err: `expected pattern`
 PARAM@[346; 347)
 ERROR@[346; 347)
-SEMI@[346; 347)
+SEMICOLON@[346; 347)
 err: `expected COMMA`
 WHITESPACE@[347; 360)
 err: `expected pattern`

@@ -371,7 +371,7 @@ SOURCE_FILE@[0; 389)
 err: `expected pattern`
 PARAM@[385; 386)
 ERROR@[385; 386)
-SEMI@[385; 386)
+SEMICOLON@[385; 386)
 err: `expected COMMA`
 WHITESPACE@[386; 387)
 err: `expected pattern`
@@ -67,7 +67,7 @@ SOURCE_FILE@[0; 86)
 ERROR@[82; 83)
 R_PAREN@[82; 83) ")"
 ERROR@[83; 84)
-SEMI@[83; 84) ";"
+SEMICOLON@[83; 84) ";"
 WHITESPACE@[84; 86) "\n\n"
 error [67; 67): expected type
 error [68; 68): expected COMMA

@@ -80,7 +80,7 @@ error [68; 68): expected COMMA
 error [72; 72): expected COMMA
 error [72; 72): expected a type
 error [72; 72): expected R_PAREN
-error [72; 72): expected SEMI
+error [72; 72): expected SEMICOLON
 error [72; 72): expected an item
 error [73; 73): expected an item
 error [79; 79): expected an item

@@ -37,8 +37,8 @@ SOURCE_FILE@[0; 56)
 WHITESPACE@[49; 50) " "
 LITERAL@[50; 52)
 INT_NUMBER@[50; 52) "92"
-SEMI@[52; 53) ";"
+SEMICOLON@[52; 53) ";"
 WHITESPACE@[53; 54) "\n"
 R_CURLY@[54; 55) "}"
 WHITESPACE@[55; 56) "\n"
-error [38; 38): expected SEMI
+error [38; 38): expected SEMICOLON
@@ -34,7 +34,7 @@ SOURCE_FILE@[0; 47)
 WHITESPACE@[32; 33) " "
 LITERAL@[33; 35)
 INT_NUMBER@[33; 35) "92"
-SEMI@[35; 36) ";"
+SEMICOLON@[35; 36) ";"
 WHITESPACE@[36; 41) "\n "
 BIN_EXPR@[41; 44)
 LITERAL@[41; 42)

@@ -60,7 +60,7 @@ SOURCE_FILE@[0; 183)
 ARG_LIST@[82; 84)
 L_PAREN@[82; 83) "("
 R_PAREN@[83; 84) ")"
-SEMI@[84; 85) ";"
+SEMICOLON@[84; 85) ";"
 WHITESPACE@[85; 94) "\n "
 METHOD_CALL_EXPR@[94; 155)
 FIELD_EXPR@[94; 105)

@@ -107,7 +107,7 @@ SOURCE_FILE@[0; 183)
 PATH_SEGMENT@[146; 149)
 NAME_REF@[146; 149)
 IDENT@[146; 149) "vec"
-EXCL@[149; 150) "!"
+BANG@[149; 150) "!"
 TOKEN_TREE@[150; 152)
 L_BRACK@[150; 151) "["
 R_BRACK@[151; 152) "]"

@@ -32,7 +32,7 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[38; 39) " "
 LITERAL@[39; 40)
 INT_NUMBER@[39; 40) "1"
-SEMI@[40; 41) ";"
+SEMICOLON@[40; 41) ";"
 WHITESPACE@[41; 46) "\n "
 LET_STMT@[46; 49)
 LET_KW@[46; 49) "let"
@@ -48,7 +48,7 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[63; 64) " "
 LITERAL@[64; 66)
 INT_NUMBER@[64; 66) "92"
-SEMI@[66; 67) ";"
+SEMICOLON@[66; 67) ";"
 WHITESPACE@[67; 72) "\n "
 LET_STMT@[72; 75)
 LET_KW@[72; 75) "let"

@@ -96,12 +96,12 @@ SOURCE_FILE@[0; 139)
 R_CURLY@[137; 138) "}"
 WHITESPACE@[138; 139) "\n"
 error [24; 24): expected expression
-error [24; 24): expected SEMI
+error [24; 24): expected SEMICOLON
 error [49; 49): expected pattern
-error [49; 49): expected SEMI
+error [49; 49): expected SEMICOLON
 error [75; 75): expected pattern
-error [75; 75): expected SEMI
+error [75; 75): expected SEMICOLON
 error [98; 98): expected pattern
-error [98; 98): expected SEMI
+error [98; 98): expected SEMICOLON
 error [124; 124): expected pattern
-error [124; 124): expected SEMI
+error [124; 124): expected SEMICOLON

@@ -150,34 +150,34 @@ SOURCE_FILE@[0; 112)
 WHITESPACE@[111; 112) "\n"
 error [16; 16): expected expression
 error [17; 17): expected R_BRACK
-error [17; 17): expected SEMI
+error [17; 17): expected SEMICOLON
 error [17; 17): expected expression
-error [18; 18): expected SEMI
+error [18; 18): expected SEMICOLON
 error [25; 25): expected a name
 error [26; 26): expected `;`, `{`, or `(`
 error [30; 30): expected pattern
-error [31; 31): expected SEMI
+error [31; 31): expected SEMICOLON
 error [53; 53): expected expression
-error [54; 54): expected SEMI
+error [54; 54): expected SEMICOLON
 error [54; 54): expected expression
-error [55; 55): expected SEMI
+error [55; 55): expected SEMICOLON
 error [60; 60): expected type
 error [60; 60): expected `{`
 error [60; 60): expected expression
-error [61; 61): expected SEMI
+error [61; 61): expected SEMICOLON
 error [65; 65): expected pattern
-error [65; 65): expected SEMI
+error [65; 65): expected SEMICOLON
 error [65; 65): expected expression
 error [92; 92): expected expression
-error [93; 93): expected SEMI
+error [93; 93): expected SEMICOLON
 error [93; 93): expected expression
-error [94; 94): expected SEMI
+error [94; 94): expected SEMICOLON
 error [95; 95): expected expression
-error [96; 96): expected SEMI
+error [96; 96): expected SEMICOLON
 error [96; 96): expected expression
-error [97; 97): expected SEMI
+error [97; 97): expected SEMICOLON
 error [103; 103): expected a name
 error [104; 104): expected `{`
 error [108; 108): expected pattern
-error [108; 108): expected SEMI
+error [108; 108): expected SEMICOLON
 error [108; 108): expected expression
@@ -17,7 +17,7 @@ SOURCE_FILE@[0; 94)
 PATH_SEGMENT@[16; 19)
 NAME_REF@[16; 19)
 IDENT@[16; 19) "foo"
-EXCL@[19; 20) "!"
+BANG@[19; 20) "!"
 WHITESPACE@[20; 21) " "
 TOKEN_TREE@[21; 49)
 L_PAREN@[21; 22) "("

@@ -144,7 +144,7 @@ SOURCE_FILE@[0; 240)
 EXPR_STMT@[121; 123)
 ERROR@[121; 122)
 R_ANGLE@[121; 122) ">"
-SEMI@[122; 123) ";"
+SEMICOLON@[122; 123) ";"
 WHITESPACE@[123; 128) "\n "
 LET_STMT@[128; 141)
 LET_KW@[128; 131) "let"

@@ -219,7 +219,7 @@ SOURCE_FILE@[0; 240)
 R_PAREN@[177; 178) ")"
 R_ANGLE@[178; 179) ">"
 ERROR@[179; 180)
-SEMI@[179; 180) ";"
+SEMICOLON@[179; 180) ";"
 WHITESPACE@[180; 185) "\n "
 LET_STMT@[185; 235)
 LET_KW@[185; 188) "let"

@@ -286,24 +286,24 @@ SOURCE_FILE@[0; 240)
 EXPR_STMT@[235; 237)
 ERROR@[235; 236)
 R_ANGLE@[235; 236) ">"
-SEMI@[236; 237) ";"
+SEMICOLON@[236; 237) ";"
 WHITESPACE@[237; 238) "\n"
 R_CURLY@[238; 239) "}"
 WHITESPACE@[239; 240) "\n"
 error [88; 88): expected COMMA
 error [88; 88): expected R_ANGLE
-error [121; 121): expected SEMI
+error [121; 121): expected SEMICOLON
 error [121; 121): expected expression
 error [140; 140): expected type
 error [141; 141): expected R_PAREN
 error [141; 141): expected COMMA
 error [141; 141): expected R_ANGLE
-error [141; 141): expected SEMI
-error [146; 146): expected SEMI
+error [141; 141): expected SEMICOLON
+error [146; 146): expected SEMICOLON
 error [146; 146): expected expression
-error [147; 147): expected SEMI
+error [147; 147): expected SEMICOLON
 error [148; 148): expected expression
-error [149; 149): expected SEMI
+error [149; 149): expected SEMICOLON
 error [154; 154): expected pattern
 error [155; 155): expected IN_KW
 error [155; 155): expected expression

@@ -314,8 +314,8 @@ error [179; 179): expected expression
 error [180; 180): expected COMMA
 error [180; 180): expected expression
 error [180; 180): expected R_PAREN
-error [180; 180): expected SEMI
+error [180; 180): expected SEMICOLON
 error [215; 215): expected COMMA
 error [215; 215): expected R_ANGLE
-error [235; 235): expected SEMI
+error [235; 235): expected SEMICOLON
 error [235; 235): expected expression
@@ -78,7 +78,7 @@ SOURCE_FILE@[0; 575)
 WHITESPACE@[183; 184) " "
 LITERAL@[184; 185)
 INT_NUMBER@[184; 185) "1"
-SEMI@[185; 186) ";"
+SEMICOLON@[185; 186) ";"
 WHITESPACE@[186; 191) "\n "
 ENUM_DEF@[191; 223)
 ENUM_KW@[191; 195) "enum"

@@ -185,7 +185,7 @@ SOURCE_FILE@[0; 575)
 WHITESPACE@[507; 508) " "
 ERROR@[508; 509)
 UNDERSCORE@[508; 509) "_"
-SEMI@[509; 510) ";"
+SEMICOLON@[509; 510) ";"
 WHITESPACE@[510; 511) " "
 COMMENT@[511; 572) "//~ ERROR: expected e ..."
 WHITESPACE@[572; 573) "\n"

@@ -27,7 +27,7 @@ SOURCE_FILE@[0; 350)
 WHITESPACE@[30; 39) "\n "
 ATTR@[39; 83)
 POUND@[39; 40) "#"
-EXCL@[40; 41) "!"
+BANG@[40; 41) "!"
 L_BRACK@[41; 42) "["
 PATH@[42; 45)
 PATH_SEGMENT@[42; 45)

@@ -42,7 +42,7 @@ SOURCE_FILE@[0; 350)
 COMMENT@[92; 122) "//! Nor are ModuleDoc ..."
 WHITESPACE@[122; 127) "\n "
 R_CURLY@[127; 128) "}"
-SEMI@[128; 129) ";"
+SEMICOLON@[128; 129) ";"
 WHITESPACE@[129; 134) "\n "
 EXPR_STMT@[134; 257)
 IF_EXPR@[134; 257)

@@ -58,7 +58,7 @@ SOURCE_FILE@[0; 350)
 WHITESPACE@[143; 152) "\n "
 ATTR@[152; 171)
 POUND@[152; 153) "#"
-EXCL@[153; 154) "!"
+BANG@[153; 154) "!"
 L_BRACK@[154; 155) "["
 PATH@[155; 158)
 PATH_SEGMENT@[155; 158)

@@ -72,7 +72,7 @@ SOURCE_FILE@[0; 350)
 WHITESPACE@[171; 180) "\n "
 ATTR@[180; 212)
 POUND@[180; 181) "#"
-EXCL@[181; 182) "!"
+BANG@[181; 182) "!"
 L_BRACK@[182; 183) "["
 PATH@[183; 186)
 PATH_SEGMENT@[183; 186)

@@ -101,7 +101,7 @@ SOURCE_FILE@[0; 350)
 WHITESPACE@[274; 283) "\n "
 ATTR@[283; 302)
 POUND@[283; 284) "#"
-EXCL@[284; 285) "!"
+BANG@[284; 285) "!"
 L_BRACK@[285; 286) "["
 PATH@[286; 289)
 PATH_SEGMENT@[286; 289)
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue