Merge branch 'master' into feature/themes

This commit is contained in:
Seivan Heidari 2019-10-31 09:43:20 +01:00
commit 8edda0e7b1
129 changed files with 5060 additions and 2866 deletions

191
Cargo.lock generated
View file

@ -108,8 +108,8 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -123,12 +123,22 @@ name = "cfg-if"
version = "0.1.10" version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "chalk-derive"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "chalk-engine" name = "chalk-engine"
version = "0.9.0" version = "0.9.0"
source = "git+https://github.com/rust-lang/chalk.git#13303bb0067c6ed0572322080ae367ee38f9e7c9" source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [ dependencies = [
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -136,17 +146,18 @@ dependencies = [
[[package]] [[package]]
name = "chalk-ir" name = "chalk-ir"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git#13303bb0067c6ed0572322080ae367ee38f9e7c9" source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [ dependencies = [
"chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "chalk-macros" name = "chalk-macros"
version = "0.1.1" version = "0.1.1"
source = "git+https://github.com/rust-lang/chalk.git#13303bb0067c6ed0572322080ae367ee38f9e7c9" source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [ dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -154,24 +165,25 @@ dependencies = [
[[package]] [[package]]
name = "chalk-rust-ir" name = "chalk-rust-ir"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git#13303bb0067c6ed0572322080ae367ee38f9e7c9" source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [ dependencies = [
"chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
] ]
[[package]] [[package]]
name = "chalk-solve" name = "chalk-solve"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git#13303bb0067c6ed0572322080ae367ee38f9e7c9" source = "git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809#8314f2fcec8582a58c24b638f1a259d4145a0809"
dependencies = [ dependencies = [
"chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)", "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -288,7 +300,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -413,7 +425,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.1.12" version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
@ -443,7 +455,7 @@ name = "heck"
version = "0.3.1" version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-segmentation 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -501,7 +513,7 @@ dependencies = [
"console 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "console 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -517,7 +529,7 @@ dependencies = [
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.8.0" version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -624,7 +636,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -634,7 +646,7 @@ version = "0.61.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -790,7 +802,7 @@ dependencies = [
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -811,7 +823,7 @@ dependencies = [
"proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -845,7 +857,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -903,7 +915,7 @@ name = "ra_assists"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_db 0.1.0", "ra_db 0.1.0",
"ra_fmt 0.1.0", "ra_fmt 0.1.0",
@ -970,7 +982,7 @@ dependencies = [
name = "ra_fmt" name = "ra_fmt"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
] ]
@ -979,9 +991,9 @@ name = "ra_hir"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)", "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)",
"ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -991,6 +1003,8 @@ dependencies = [
"ra_arena 0.1.0", "ra_arena 0.1.0",
"ra_cfg 0.1.0", "ra_cfg 0.1.0",
"ra_db 0.1.0", "ra_db 0.1.0",
"ra_hir_def 0.1.0",
"ra_hir_expand 0.1.0",
"ra_mbe 0.1.0", "ra_mbe 0.1.0",
"ra_prof 0.1.0", "ra_prof 0.1.0",
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
@ -1000,6 +1014,38 @@ dependencies = [
"test_utils 0.1.0", "test_utils 0.1.0",
] ]
[[package]]
name = "ra_hir_def"
version = "0.1.0"
dependencies = [
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0",
"ra_cfg 0.1.0",
"ra_db 0.1.0",
"ra_hir_expand 0.1.0",
"ra_mbe 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0",
"ra_tt 0.1.0",
"relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
]
[[package]]
name = "ra_hir_expand"
version = "0.1.0"
dependencies = [
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0",
"ra_db 0.1.0",
"ra_mbe 0.1.0",
"ra_prof 0.1.0",
"ra_syntax 0.1.0",
"ra_tt 0.1.0",
]
[[package]] [[package]]
name = "ra_ide_api" name = "ra_ide_api"
version = "0.1.0" version = "0.1.0"
@ -1007,7 +1053,7 @@ dependencies = [
"format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1049,7 +1095,7 @@ dependencies = [
"ra_vfs_glob 0.1.0", "ra_vfs_glob 0.1.0",
"relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
@ -1065,7 +1111,7 @@ dependencies = [
"ra_syntax 0.1.0", "ra_syntax 0.1.0",
"ra_tt 0.1.0", "ra_tt 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0", "test_utils 0.1.0",
] ]
@ -1081,7 +1127,7 @@ name = "ra_prof"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)", "backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1097,7 +1143,7 @@ dependencies = [
"ra_cfg 0.1.0", "ra_cfg 0.1.0",
"ra_db 0.1.0", "ra_db 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1106,7 +1152,7 @@ name = "ra_syntax"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_parser 0.1.0", "ra_parser 0.1.0",
"ra_text_edit 0.1.0", "ra_text_edit 0.1.0",
@ -1180,7 +1226,7 @@ name = "rand"
version = "0.7.2" version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1224,7 +1270,7 @@ name = "rand_core"
version = "0.5.1" version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1370,7 +1416,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1430,7 +1476,7 @@ dependencies = [
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa-macros 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)", "salsa-macros 0.13.2 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1441,7 +1487,7 @@ dependencies = [
"heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1463,7 +1509,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1473,20 +1519,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.101" version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.101" version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1496,7 +1542,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1506,7 +1552,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1516,7 +1562,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1527,7 +1573,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "0.6.10" version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -1535,7 +1581,7 @@ name = "smol_str"
version = "0.1.15" version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1557,7 +1603,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "syn" name = "syn"
version = "1.0.5" version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1647,12 +1693,12 @@ name = "unicode-normalization"
version = "0.1.8" version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "unicode-segmentation" name = "unicode-segmentation"
version = "1.3.0" version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -1673,7 +1719,7 @@ dependencies = [
"idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1682,7 +1728,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1759,7 +1805,7 @@ dependencies = [
"proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1794,11 +1840,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d" "checksum cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d"
"checksum cc 1.0.46 (registry+https://github.com/rust-lang/crates.io-index)" = "0213d356d3c4ea2c18c40b037c3be23cd639825c18f25ee670ac7813beeef99c" "checksum cc 1.0.46 (registry+https://github.com/rust-lang/crates.io-index)" = "0213d356d3c4ea2c18c40b037c3be23cd639825c18f25ee670ac7813beeef99c"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" "checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" "checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git)" = "<none>" "checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" "checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git)" = "<none>" "checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=8314f2fcec8582a58c24b638f1a259d4145a0809)" = "<none>"
"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68" "checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68"
"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e" "checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
@ -1828,7 +1875,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum getrandom 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "473a1265acc8ff1e808cd0a1af8cee3c2ee5200916058a2ca113c29f2d903571" "checksum getrandom 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e7db7ca94ed4cd01190ceee0d8a8052f08a247aa1b469a7f68c6a3b71afcf407"
"checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
"checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" "checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2"
"checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
@ -1839,7 +1886,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" "checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0"
"checksum insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0d499dc062e841590a67230d853bce62d0abeb91304927871670b7c55c461349" "checksum insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0d499dc062e841590a67230d853bce62d0abeb91304927871670b7c55c461349"
"checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" "checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
"checksum itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5b8467d9c1cebe26feb08c640139247fac215782d35371ade9a2136ed6085358" "checksum itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "87fa75c9dea7b07be3138c49abbb83fd4bea199b5cdc76f9804458edc5da0d6e"
"checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f" "checksum itoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "501266b7edd0174f8530248f87f99c88fbe60ca4ef3dd486835b8d8d53136f7f"
"checksum jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c502a5ff9dd2924f1ed32ba96e3b65735d837b4bfd978d3161b1702e66aca4b7" "checksum jemalloc-ctl 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c502a5ff9dd2924f1ed32ba96e3b65735d837b4bfd978d3161b1702e66aca4b7"
"checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45" "checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45"
@ -1922,17 +1969,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "9796c9b7ba2ffe7a9ce53c2287dfc48080f4b2b362fcc245a259b3a7201119dd" "checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0"
"checksum serde_derive 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)" = "4b133a43a1ecd55d4086bd5b4dc6c1751c68b1bfbeba7a5040442022c7e7c02e" "checksum serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "ca13fc1a832f793322228923fbb3aba9f3f44444898f835d31ad1b74fa0a2bf8"
"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2"
"checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573" "checksum serde_repr 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "cd02c7587ec314570041b2754829f84d873ced14a96d1fd1823531e11db40573"
"checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35" "checksum serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)" = "691b17f19fc1ec9d94ec0b5864859290dff279dbd7b03f017afda54eb36c3c35"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum smallvec 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ab606a9c5e214920bb66c458cd7be8ef094f813f20fe77a54cc7dbfff220d4b7" "checksum smallvec 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cefaa50e76a6f10b86f36e640eb1739eafbd4084865067778463913e43a77ff3"
"checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b" "checksum smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34836c9a295c62c2ce3514471117c5cb269891e8421b2aafdd910050576c4d8b"
"checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a" "checksum stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
"checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" "checksum superslice 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
"checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" "checksum syn 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7bedb3320d0f3035594b0b723c8a28d7d336a3eda3881db79e61d676fb644c"
"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
"checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625" "checksum termios 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72b620c5ea021d75a735c943269bb07d30c9b77d6ac6b236bc8b5c496ef05625"
"checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579" "checksum text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579"
@ -1942,7 +1989,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150" "checksum unicase 2.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
"checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426" "checksum unicode-normalization 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "141339a08b982d942be2ca06ff8b076563cbe223d1befd5450716790d44e2426"
"checksum unicode-segmentation 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1967f4cdfc355b37fd76d2a954fb2ed3871034eb4f26d60537d88795cfc332a9" "checksum unicode-segmentation 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc5415c074426c7c65db13bd647c23d78c0fb2e10dca0b8fb0f40058a59bccdf"
"checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"

View file

@ -1,11 +1,11 @@
//! FIXME: write short doc here //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_db::FileRange; use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent}; use ra_fmt::{leading_indent, reindent};
use ra_syntax::{ use ra_syntax::{
algo::{self, find_covering_element, find_node_at_offset}, algo::{self, find_covering_element, find_node_at_offset},
AstNode, SourceFile, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextUnit, AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit,
TokenAtOffset, TokenAtOffset,
}; };
use ra_text_edit::TextEditBuilder; use ra_text_edit::TextEditBuilder;
@ -14,8 +14,8 @@ use crate::{AssistAction, AssistId, AssistLabel};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub(crate) enum Assist { pub(crate) enum Assist {
Unresolved(Vec<AssistLabel>), Unresolved { label: AssistLabel },
Resolved(Vec<(AssistLabel, AssistAction)>), Resolved { label: AssistLabel, action: AssistAction },
} }
/// `AssistCtx` allows to apply an assist or check if it could be applied. /// `AssistCtx` allows to apply an assist or check if it could be applied.
@ -54,7 +54,6 @@ pub(crate) struct AssistCtx<'a, DB> {
pub(crate) frange: FileRange, pub(crate) frange: FileRange,
source_file: SourceFile, source_file: SourceFile,
should_compute_edit: bool, should_compute_edit: bool,
assist: Assist,
} }
impl<'a, DB> Clone for AssistCtx<'a, DB> { impl<'a, DB> Clone for AssistCtx<'a, DB> {
@ -64,7 +63,6 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
frange: self.frange, frange: self.frange,
source_file: self.source_file.clone(), source_file: self.source_file.clone(),
should_compute_edit: self.should_compute_edit, should_compute_edit: self.should_compute_edit,
assist: self.assist.clone(),
} }
} }
} }
@ -75,43 +73,41 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
F: FnOnce(AssistCtx<DB>) -> T, F: FnOnce(AssistCtx<DB>) -> T,
{ {
let parse = db.parse(frange.file_id); let parse = db.parse(frange.file_id);
let assist =
if should_compute_edit { Assist::Resolved(vec![]) } else { Assist::Unresolved(vec![]) };
let ctx = AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit, assist }; let ctx = AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit };
f(ctx) f(ctx)
} }
pub(crate) fn add_action( pub(crate) fn add_assist(
&mut self, self,
id: AssistId, id: AssistId,
label: impl Into<String>, label: impl Into<String>,
f: impl FnOnce(&mut AssistBuilder), f: impl FnOnce(&mut AssistBuilder),
) -> &mut Self { ) -> Option<Assist> {
let label = AssistLabel { label: label.into(), id }; let label = AssistLabel { label: label.into(), id };
match &mut self.assist { let assist = if self.should_compute_edit {
Assist::Unresolved(labels) => labels.push(label), let action = {
Assist::Resolved(labels_actions) => { let mut edit = AssistBuilder::default();
let action = { f(&mut edit);
let mut edit = AssistBuilder::default(); edit.build()
f(&mut edit); };
edit.build() Assist::Resolved { label, action }
}; } else {
labels_actions.push((label, action)); Assist::Unresolved { label }
} };
}
self
}
pub(crate) fn build(self) -> Option<Assist> { Some(assist)
Some(self.assist)
} }
pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
self.source_file.syntax().token_at_offset(self.frange.range.start()) self.source_file.syntax().token_at_offset(self.frange.range.start())
} }
pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> { pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
self.token_at_offset().find(|it| it.kind() == kind)
}
pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
find_node_at_offset(self.source_file.syntax(), self.frange.range.start()) find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
} }
pub(crate) fn covering_element(&self) -> SyntaxElement { pub(crate) fn covering_element(&self) -> SyntaxElement {

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AttrsOwner}, ast::{self, AstNode, AttrsOwner},
@ -9,10 +7,28 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: add_derive
let nominal = ctx.node_at_offset::<ast::NominalDef>()?; //
// Adds a new `#[derive()]` clause to a struct or enum.
//
// ```
// struct Point {
// x: u32,
// y: u32,<|>
// }
// ```
// ->
// ```
// #[derive()]
// struct Point {
// x: u32,
// y: u32,
// }
// ```
pub(crate) fn add_derive(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let node_start = derive_insertion_offset(&nominal)?; let node_start = derive_insertion_offset(&nominal)?;
ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| { ctx.add_assist(AssistId("add_derive"), "add `#[derive]`", |edit| {
let derive_attr = nominal let derive_attr = nominal
.attrs() .attrs()
.filter_map(|x| x.as_simple_call()) .filter_map(|x| x.as_simple_call())
@ -28,9 +44,7 @@ pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
}; };
edit.target(nominal.syntax().text_range()); edit.target(nominal.syntax().text_range());
edit.set_cursor(offset) edit.set_cursor(offset)
}); })
ctx.build()
} }
// Insert `derive` after doc comments. // Insert `derive` after doc comments.

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::{db::HirDatabase, HirDisplay, Ty}; use hir::{db::HirDatabase, HirDisplay, Ty};
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, LetStmt, NameOwner}, ast::{self, AstNode, LetStmt, NameOwner},
@ -8,9 +6,23 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
/// Add explicit type assist. // Assist: add_explicit_type
pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { //
let stmt = ctx.node_at_offset::<LetStmt>()?; // Specify type for a let binding.
//
// ```
// fn main() {
// let x<|> = 92;
// }
// ```
// ->
// ```
// fn main() {
// let x: i32 = 92;
// }
// ```
pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let stmt = ctx.find_node_at_offset::<LetStmt>()?;
let expr = stmt.initializer()?; let expr = stmt.initializer()?;
let pat = stmt.pat()?; let pat = stmt.pat()?;
// Must be a binding // Must be a binding
@ -35,11 +47,10 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
return None; return None;
} }
ctx.add_action(AssistId("add_explicit_type"), "add explicit type", |edit| { ctx.add_assist(AssistId("add_explicit_type"), "add explicit type", |edit| {
edit.target(pat_range); edit.target(pat_range);
edit.insert(name_range.end(), format!(": {}", ty.display(db))); edit.insert(name_range.end(), format!(": {}", ty.display(db)));
}); })
ctx.build()
} }
/// Returns true if any type parameter is unknown /// Returns true if any type parameter is unknown

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use format_buf::format; use format_buf::format;
use hir::db::HirDatabase; use hir::db::HirDatabase;
use join_to_string::join; use join_to_string::join;
@ -10,10 +8,29 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: add_impl
let nominal = ctx.node_at_offset::<ast::NominalDef>()?; //
// Adds a new inherent impl for a type.
//
// ```
// struct Ctx<T: Clone> {
// data: T,<|>
// }
// ```
// ->
// ```
// struct Ctx<T: Clone> {
// data: T,
// }
//
// impl<T: Clone> Ctx<T> {
//
// }
// ```
pub(crate) fn add_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let name = nominal.name()?; let name = nominal.name()?;
ctx.add_action(AssistId("add_impl"), "add impl", |edit| { ctx.add_assist(AssistId("add_impl"), "add impl", |edit| {
edit.target(nominal.syntax().text_range()); edit.target(nominal.syntax().text_range());
let type_params = nominal.type_param_list(); let type_params = nominal.type_param_list();
let start_offset = nominal.syntax().text_range().end(); let start_offset = nominal.syntax().text_range().end();
@ -37,9 +54,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
edit.set_cursor(start_offset + TextUnit::of_str(&buf)); edit.set_cursor(start_offset + TextUnit::of_str(&buf));
buf.push_str("\n}"); buf.push_str("\n}");
edit.insert(start_offset, buf); edit.insert(start_offset, buf);
}); })
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,18 +1,81 @@
//! FIXME: write short doc here
use hir::{self, db::HirDatabase}; use hir::{self, db::HirDatabase};
use ra_text_edit::TextEditBuilder;
use crate::{
assist_ctx::{Assist, AssistCtx},
AssistId,
};
use ra_syntax::{ use ra_syntax::{
ast::{self, NameOwner}, ast::{self, NameOwner},
AstNode, Direction, SmolStr, AstNode, Direction, SmolStr,
SyntaxKind::{PATH, PATH_SEGMENT}, SyntaxKind::{PATH, PATH_SEGMENT},
SyntaxNode, TextRange, T, SyntaxNode, TextRange, T,
}; };
use ra_text_edit::TextEditBuilder;
use crate::{
assist_ctx::{Assist, AssistCtx},
AssistId,
};
/// This function produces sequence of text edits into edit
/// to import the target path in the most appropriate scope given
/// the cursor position
pub fn auto_import_text_edit(
// Ideally the position of the cursor, used to
position: &SyntaxNode,
// The statement to use as anchor (last resort)
anchor: &SyntaxNode,
// The path to import as a sequence of strings
target: &[SmolStr],
edit: &mut TextEditBuilder,
) {
let container = position.ancestors().find_map(|n| {
if let Some(module) = ast::Module::cast(n.clone()) {
return module.item_list().map(|it| it.syntax().clone());
}
ast::SourceFile::cast(n).map(|it| it.syntax().clone())
});
if let Some(container) = container {
let action = best_action_for_target(container, anchor.clone(), target);
make_assist(&action, target, edit);
}
}
// Assist: add_import
//
// Adds a use statement for a given fully-qualified path.
//
// ```
// fn process(map: std::collections::<|>HashMap<String, String>) {}
// ```
// ->
// ```
// use std::collections::HashMap;
//
// fn process(map: HashMap<String, String>) {}
// ```
pub(crate) fn add_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let path: ast::Path = ctx.find_node_at_offset()?;
// We don't want to mess with use statements
if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
return None;
}
let hir_path = hir::Path::from_ast(path.clone())?;
let segments = collect_hir_path_segments(&hir_path)?;
if segments.len() < 2 {
return None;
}
let module = path.syntax().ancestors().find_map(ast::Module::cast);
let position = match module.and_then(|it| it.item_list()) {
Some(item_list) => item_list.syntax().clone(),
None => {
let current_file = path.syntax().ancestors().find_map(ast::SourceFile::cast)?;
current_file.syntax().clone()
}
};
ctx.add_assist(AssistId("add_import"), format!("import {}", fmt_segments(&segments)), |edit| {
apply_auto_import(&position, &path, &segments, edit.text_edit_builder());
})
}
fn collect_path_segments_raw( fn collect_path_segments_raw(
segments: &mut Vec<ast::PathSegment>, segments: &mut Vec<ast::PathSegment>,
@ -61,9 +124,9 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
} }
} }
// Returns the numeber of common segments. /// Returns the number of common segments.
fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize { fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count() left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count()
} }
fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool { fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
@ -84,7 +147,7 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
a == b.text() a == b.text()
} }
#[derive(Clone)] #[derive(Clone, Debug)]
enum ImportAction { enum ImportAction {
Nothing, Nothing,
// Add a brand new use statement. // Add a brand new use statement.
@ -154,10 +217,18 @@ impl ImportAction {
( (
ImportAction::AddNestedImport { common_segments: n, .. }, ImportAction::AddNestedImport { common_segments: n, .. },
ImportAction::AddInTreeList { common_segments: m, .. }, ImportAction::AddInTreeList { common_segments: m, .. },
) => n > m, )
( | (
ImportAction::AddInTreeList { common_segments: n, .. }, ImportAction::AddInTreeList { common_segments: n, .. },
ImportAction::AddNestedImport { common_segments: m, .. }, ImportAction::AddNestedImport { common_segments: m, .. },
)
| (
ImportAction::AddInTreeList { common_segments: n, .. },
ImportAction::AddInTreeList { common_segments: m, .. },
)
| (
ImportAction::AddNestedImport { common_segments: n, .. },
ImportAction::AddNestedImport { common_segments: m, .. },
) => n > m, ) => n > m,
(ImportAction::AddInTreeList { .. }, _) => true, (ImportAction::AddInTreeList { .. }, _) => true,
(ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false, (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false,
@ -226,7 +297,7 @@ fn walk_use_tree_for_best_action(
common if common == left.len() && left.len() == right.len() => { common if common == left.len() && left.len() == right.len() => {
// e.g: target is std::fmt and we can have // e.g: target is std::fmt and we can have
// 1- use std::fmt; // 1- use std::fmt;
// 2- use std::fmt:{ ... } // 2- use std::fmt::{ ... }
if let Some(list) = tree_list { if let Some(list) = tree_list {
// In case 2 we need to add self to the nested list // In case 2 we need to add self to the nested list
// unless it's already there // unless it's already there
@ -474,7 +545,7 @@ fn make_assist_add_nested_import(
if add_colon_colon { if add_colon_colon {
buf.push_str("::"); buf.push_str("::");
} }
buf.push_str("{ "); buf.push_str("{");
if add_self { if add_self {
buf.push_str("self, "); buf.push_str("self, ");
} }
@ -505,7 +576,7 @@ fn apply_auto_import(
} }
} }
pub fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> {
let mut ps = Vec::<SmolStr>::with_capacity(10); let mut ps = Vec::<SmolStr>::with_capacity(10);
match path.kind { match path.kind {
hir::PathKind::Abs => ps.push("".into()), hir::PathKind::Abs => ps.push("".into()),
@ -521,87 +592,16 @@ pub fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> {
Some(ps) Some(ps)
} }
// This function produces sequence of text edits into edit
// to import the target path in the most appropriate scope given
// the cursor position
pub fn auto_import_text_edit(
// Ideally the position of the cursor, used to
position: &SyntaxNode,
// The statement to use as anchor (last resort)
anchor: &SyntaxNode,
// The path to import as a sequence of strings
target: &[SmolStr],
edit: &mut TextEditBuilder,
) {
let container = position.ancestors().find_map(|n| {
if let Some(module) = ast::Module::cast(n.clone()) {
return module.item_list().map(|it| it.syntax().clone());
}
ast::SourceFile::cast(n).map(|it| it.syntax().clone())
});
if let Some(container) = container {
let action = best_action_for_target(container, anchor.clone(), target);
make_assist(&action, target, edit);
}
}
pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let path: ast::Path = ctx.node_at_offset()?;
// We don't want to mess with use statements
if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
return None;
}
let hir_path = hir::Path::from_ast(path.clone())?;
let segments = collect_hir_path_segments(&hir_path)?;
if segments.len() < 2 {
return None;
}
if let Some(module) = path.syntax().ancestors().find_map(ast::Module::cast) {
if let (Some(item_list), Some(name)) = (module.item_list(), module.name()) {
ctx.add_action(
AssistId("auto_import"),
format!("import {} in mod {}", fmt_segments(&segments), name.text()),
|edit| {
apply_auto_import(
item_list.syntax(),
&path,
&segments,
edit.text_edit_builder(),
);
},
);
}
} else {
let current_file = path.syntax().ancestors().find_map(ast::SourceFile::cast)?;
ctx.add_action(
AssistId("auto_import"),
format!("import {} in the current file", fmt_segments(&segments)),
|edit| {
apply_auto_import(
current_file.syntax(),
&path,
&segments,
edit.text_edit_builder(),
);
},
);
}
ctx.build()
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use crate::helpers::{check_assist, check_assist_not_applicable}; use crate::helpers::{check_assist, check_assist_not_applicable};
use super::*;
#[test] #[test]
fn test_auto_import_add_use_no_anchor() { fn test_auto_import_add_use_no_anchor() {
check_assist( check_assist(
auto_import, add_import,
" "
std::fmt::Debug<|> std::fmt::Debug<|>
", ",
@ -615,7 +615,7 @@ Debug<|>
#[test] #[test]
fn test_auto_import_add_use_no_anchor_with_item_below() { fn test_auto_import_add_use_no_anchor_with_item_below() {
check_assist( check_assist(
auto_import, add_import,
" "
std::fmt::Debug<|> std::fmt::Debug<|>
@ -636,7 +636,7 @@ fn main() {
#[test] #[test]
fn test_auto_import_add_use_no_anchor_with_item_above() { fn test_auto_import_add_use_no_anchor_with_item_above() {
check_assist( check_assist(
auto_import, add_import,
" "
fn main() { fn main() {
} }
@ -657,7 +657,7 @@ Debug<|>
#[test] #[test]
fn test_auto_import_add_use_no_anchor_2seg() { fn test_auto_import_add_use_no_anchor_2seg() {
check_assist( check_assist(
auto_import, add_import,
" "
std::fmt<|>::Debug std::fmt<|>::Debug
", ",
@ -672,7 +672,7 @@ fmt<|>::Debug
#[test] #[test]
fn test_auto_import_add_use() { fn test_auto_import_add_use() {
check_assist( check_assist(
auto_import, add_import,
" "
use stdx; use stdx;
@ -692,7 +692,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_file_use_other_anchor() { fn test_auto_import_file_use_other_anchor() {
check_assist( check_assist(
auto_import, add_import,
" "
impl std::fmt::Debug<|> for Foo { impl std::fmt::Debug<|> for Foo {
} }
@ -709,7 +709,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_add_use_other_anchor_indent() { fn test_auto_import_add_use_other_anchor_indent() {
check_assist( check_assist(
auto_import, add_import,
" "
impl std::fmt::Debug<|> for Foo { impl std::fmt::Debug<|> for Foo {
} }
@ -726,7 +726,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_split_different() { fn test_auto_import_split_different() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt; use std::fmt;
@ -734,7 +734,7 @@ impl std::io<|> for Foo {
} }
", ",
" "
use std::{ io, fmt}; use std::{io, fmt};
impl io<|> for Foo { impl io<|> for Foo {
} }
@ -745,7 +745,7 @@ impl io<|> for Foo {
#[test] #[test]
fn test_auto_import_split_self_for_use() { fn test_auto_import_split_self_for_use() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt; use std::fmt;
@ -753,7 +753,7 @@ impl std::fmt::Debug<|> for Foo {
} }
", ",
" "
use std::fmt::{ self, Debug, }; use std::fmt::{self, Debug, };
impl Debug<|> for Foo { impl Debug<|> for Foo {
} }
@ -764,7 +764,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_split_self_for_target() { fn test_auto_import_split_self_for_target() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::Debug; use std::fmt::Debug;
@ -772,7 +772,7 @@ impl std::fmt<|> for Foo {
} }
", ",
" "
use std::fmt::{ self, Debug}; use std::fmt::{self, Debug};
impl fmt<|> for Foo { impl fmt<|> for Foo {
} }
@ -783,7 +783,7 @@ impl fmt<|> for Foo {
#[test] #[test]
fn test_auto_import_add_to_nested_self_nested() { fn test_auto_import_add_to_nested_self_nested() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::{Debug, nested::{Display}}; use std::fmt::{Debug, nested::{Display}};
@ -802,7 +802,7 @@ impl nested<|> for Foo {
#[test] #[test]
fn test_auto_import_add_to_nested_self_already_included() { fn test_auto_import_add_to_nested_self_already_included() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::{Debug, nested::{self, Display}}; use std::fmt::{Debug, nested::{self, Display}};
@ -821,7 +821,7 @@ impl nested<|> for Foo {
#[test] #[test]
fn test_auto_import_add_to_nested_nested() { fn test_auto_import_add_to_nested_nested() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::{Debug, nested::{Display}}; use std::fmt::{Debug, nested::{Display}};
@ -840,7 +840,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_split_common_target_longer() { fn test_auto_import_split_common_target_longer() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::Debug; use std::fmt::Debug;
@ -848,7 +848,7 @@ impl std::fmt::nested::Display<|> for Foo {
} }
", ",
" "
use std::fmt::{ nested::Display, Debug}; use std::fmt::{nested::Display, Debug};
impl Display<|> for Foo { impl Display<|> for Foo {
} }
@ -859,7 +859,7 @@ impl Display<|> for Foo {
#[test] #[test]
fn test_auto_import_split_common_use_longer() { fn test_auto_import_split_common_use_longer() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt::nested::Debug; use std::fmt::nested::Debug;
@ -867,7 +867,7 @@ impl std::fmt::Display<|> for Foo {
} }
", ",
" "
use std::fmt::{ Display, nested::Debug}; use std::fmt::{Display, nested::Debug};
impl Display<|> for Foo { impl Display<|> for Foo {
} }
@ -875,10 +875,33 @@ impl Display<|> for Foo {
); );
} }
// Importing a path whose prefix already appears in a nested use tree should
// extend the existing tree (`ty::{..., lower}`) rather than add a new `use`.
// NOTE(review): fixture indentation reconstructed from a whitespace-stripped
// paste — confirm against the original file before relying on exact spans.
#[test]
fn test_auto_import_use_nested_import() {
    check_assist(
        add_import,
        "
use crate::{
    ty::{Substs, Ty},
    AssocItem,
};
fn foo() { crate::ty::lower<|>::trait_env() }
",
        "
use crate::{
    ty::{Substs, Ty, lower},
    AssocItem,
};
fn foo() { lower<|>::trait_env() }
",
    );
}
#[test] #[test]
fn test_auto_import_alias() { fn test_auto_import_alias() {
check_assist( check_assist(
auto_import, add_import,
" "
use std::fmt as foo; use std::fmt as foo;
@ -897,7 +920,7 @@ impl Debug<|> for Foo {
#[test] #[test]
fn test_auto_import_not_applicable_one_segment() { fn test_auto_import_not_applicable_one_segment() {
check_assist_not_applicable( check_assist_not_applicable(
auto_import, add_import,
" "
impl foo<|> for Foo { impl foo<|> for Foo {
} }
@ -908,7 +931,7 @@ impl foo<|> for Foo {
#[test] #[test]
fn test_auto_import_not_applicable_in_use() { fn test_auto_import_not_applicable_in_use() {
check_assist_not_applicable( check_assist_not_applicable(
auto_import, add_import,
" "
use std::fmt<|>; use std::fmt<|>;
", ",
@ -918,7 +941,7 @@ use std::fmt<|>;
#[test] #[test]
fn test_auto_import_add_use_no_anchor_in_mod_mod() { fn test_auto_import_add_use_no_anchor_in_mod_mod() {
check_assist( check_assist(
auto_import, add_import,
" "
mod foo { mod foo {
mod bar { mod bar {

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::{db::HirDatabase, HasSource}; use hir::{db::HirDatabase, HasSource};
use ra_syntax::{ use ra_syntax::{
ast::{self, edit, make, AstNode, NameOwner}, ast::{self, edit, make, AstNode, NameOwner},
@ -14,6 +12,34 @@ enum AddMissingImplMembersMode {
NoDefaultMethods, NoDefaultMethods,
} }
// Assist: add_impl_missing_members
//
// Adds scaffold for required impl members.
//
// ```
// trait T {
// Type X;
// fn foo(&self);
// fn bar(&self) {}
// }
//
// impl T for () {<|>
//
// }
// ```
// ->
// ```
// trait T {
// Type X;
// fn foo(&self);
// fn bar(&self) {}
// }
//
// impl T for () {
// fn foo(&self) { unimplemented!() }
//
// }
// ```
pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
add_missing_impl_members_inner( add_missing_impl_members_inner(
ctx, ctx,
@ -23,6 +49,38 @@ pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Opti
) )
} }
// Assist: add_impl_default_members
//
// Adds scaffold for overriding default impl members.
//
// ```
// trait T {
// Type X;
// fn foo(&self);
// fn bar(&self) {}
// }
//
// impl T for () {
// Type X = ();
// fn foo(&self) {}<|>
//
// }
// ```
// ->
// ```
// trait T {
// Type X;
// fn foo(&self);
// fn bar(&self) {}
// }
//
// impl T for () {
// Type X = ();
// fn foo(&self) {}
// fn bar(&self) {}
//
// }
// ```
pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
add_missing_impl_members_inner( add_missing_impl_members_inner(
ctx, ctx,
@ -33,12 +91,12 @@ pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> O
} }
fn add_missing_impl_members_inner( fn add_missing_impl_members_inner(
mut ctx: AssistCtx<impl HirDatabase>, ctx: AssistCtx<impl HirDatabase>,
mode: AddMissingImplMembersMode, mode: AddMissingImplMembersMode,
assist_id: &'static str, assist_id: &'static str,
label: &'static str, label: &'static str,
) -> Option<Assist> { ) -> Option<Assist> {
let impl_node = ctx.node_at_offset::<ast::ImplBlock>()?; let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
let impl_item_list = impl_node.item_list()?; let impl_item_list = impl_node.item_list()?;
let trait_def = { let trait_def = {
@ -75,7 +133,7 @@ fn add_missing_impl_members_inner(
return None; return None;
} }
ctx.add_action(AssistId(assist_id), label, |edit| { ctx.add_assist(AssistId(assist_id), label, |edit| {
let n_existing_items = impl_item_list.impl_items().count(); let n_existing_items = impl_item_list.impl_items().count();
let items = missing_items let items = missing_items
.into_iter() .into_iter()
@ -92,9 +150,7 @@ fn add_missing_impl_members_inner(
edit.replace_ast(impl_item_list, new_impl_item_list); edit.replace_ast(impl_item_list, new_impl_item_list);
edit.set_cursor(cursor_position); edit.set_cursor(cursor_position);
}); })
ctx.build()
} }
fn add_body(fn_def: ast::FnDef) -> ast::FnDef { fn add_body(fn_def: ast::FnDef) -> ast::FnDef {

View file

@ -1,20 +1,30 @@
//! This contains the functions associated with the demorgan assist.
//! This assist transforms boolean expressions of the form `!a || !b` into
//! `!(a && b)`.
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use ra_syntax::SyntaxNode; use ra_syntax::SyntaxNode;
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
/// Assist for applying demorgan's law // Assist: apply_demorgan
/// //
/// This transforms expressions of the form `!l || !r` into `!(l && r)`. // Apply [De Morgan's law](https://en.wikipedia.org/wiki/De_Morgan%27s_laws).
/// This also works with `&&`. This assist can only be applied with the cursor // This transforms expressions of the form `!l || !r` into `!(l && r)`.
/// on either `||` or `&&`, with both operands being a negation of some kind. // This also works with `&&`. This assist can only be applied with the cursor
/// This means something of the form `!x` or `x != y`. // on either `||` or `&&`, with both operands being a negation of some kind.
pub(crate) fn apply_demorgan(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // This means something of the form `!x` or `x != y`.
let expr = ctx.node_at_offset::<ast::BinExpr>()?; //
// ```
// fn main() {
// if x != 4 ||<|> !y {}
// }
// ```
// ->
// ```
// fn main() {
// if !(x == 4 && y) {}
// }
// ```
pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
let op = expr.op_kind()?; let op = expr.op_kind()?;
let op_range = expr.op_token()?.text_range(); let op_range = expr.op_token()?.text_range();
let opposite_op = opposite_logic_op(op)?; let opposite_op = opposite_logic_op(op)?;
@ -29,13 +39,12 @@ pub(crate) fn apply_demorgan(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Ass
let not_lhs = undo_negation(lhs)?; let not_lhs = undo_negation(lhs)?;
let not_rhs = undo_negation(rhs)?; let not_rhs = undo_negation(rhs)?;
ctx.add_action(AssistId("apply_demorgan"), "apply demorgan's law", |edit| { ctx.add_assist(AssistId("apply_demorgan"), "apply demorgan's law", |edit| {
edit.target(op_range); edit.target(op_range);
edit.replace(op_range, opposite_op); edit.replace(op_range, opposite_op);
edit.replace(lhs_range, format!("!({}", not_lhs)); edit.replace(lhs_range, format!("!({}", not_lhs));
edit.replace(rhs_range, format!("{})", not_rhs)); edit.replace(rhs_range, format!("{})", not_rhs));
}); })
ctx.build()
} }
// Return the opposite text for a given logical operator, if it makes sense // Return the opposite text for a given logical operator, if it makes sense

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, NameOwner, VisibilityOwner}, ast::{self, NameOwner, VisibilityOwner},
@ -13,14 +11,25 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
// Assist: change_visibility
//
// Adds or changes existing visibility specifier.
//
// ```
// <|>fn frobnicate() {}
// ```
// ->
// ```
// pub(crate) fn frobnicate() {}
// ```
pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
if let Some(vis) = ctx.node_at_offset::<ast::Visibility>() { if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() {
return change_vis(ctx, vis); return change_vis(ctx, vis);
} }
add_vis(ctx) add_vis(ctx)
} }
fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { fn add_vis(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
_ => false, _ => false,
@ -48,13 +57,11 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
(vis_offset(field.syntax()), ident.text_range()) (vis_offset(field.syntax()), ident.text_range())
}; };
ctx.add_action(AssistId("change_visibility"), "make pub(crate)", |edit| { ctx.add_assist(AssistId("change_visibility"), "make pub(crate)", |edit| {
edit.target(target); edit.target(target);
edit.insert(offset, "pub(crate) "); edit.insert(offset, "pub(crate) ");
edit.set_cursor(offset); edit.set_cursor(offset);
}); })
ctx.build()
} }
fn vis_offset(node: &SyntaxNode) -> TextUnit { fn vis_offset(node: &SyntaxNode) -> TextUnit {
@ -68,24 +75,20 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit {
.unwrap_or_else(|| node.text_range().start()) .unwrap_or_else(|| node.text_range().start())
} }
fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> { fn change_vis(ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> {
if vis.syntax().text() == "pub" { if vis.syntax().text() == "pub" {
ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| { return ctx.add_assist(AssistId("change_visibility"), "change to pub(crate)", |edit| {
edit.target(vis.syntax().text_range()); edit.target(vis.syntax().text_range());
edit.replace(vis.syntax().text_range(), "pub(crate)"); edit.replace(vis.syntax().text_range(), "pub(crate)");
edit.set_cursor(vis.syntax().text_range().start()) edit.set_cursor(vis.syntax().text_range().start())
}); });
return ctx.build();
} }
if vis.syntax().text() == "pub(crate)" { if vis.syntax().text() == "pub(crate)" {
ctx.add_action(AssistId("change_visibility"), "change to pub", |edit| { return ctx.add_assist(AssistId("change_visibility"), "change to pub", |edit| {
edit.target(vis.syntax().text_range()); edit.target(vis.syntax().text_range());
edit.replace(vis.syntax().text_range(), "pub"); edit.replace(vis.syntax().text_range(), "pub");
edit.set_cursor(vis.syntax().text_range().start()); edit.set_cursor(vis.syntax().text_range().start());
}); });
return ctx.build();
} }
None None
} }

View file

@ -1,26 +1,3 @@
//! Assist: `convert_to_guarded_return`
//!
//! Replace a large conditional with a guarded return.
//!
//! ```text
//! fn <|>main() {
//! if cond {
//! foo();
//! bar();
//! }
//! }
//! ```
//! ->
//! ```text
//! fn main() {
//! if !cond {
//! return;
//! }
//! foo();
//! bar();
//! }
//! ```
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use hir::db::HirDatabase; use hir::db::HirDatabase;
@ -36,8 +13,30 @@ use crate::{
AssistId, AssistId,
}; };
pub(crate) fn convert_to_guarded_return(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: convert_to_guarded_return
let if_expr: ast::IfExpr = ctx.node_at_offset()?; //
// Replace a large conditional with a guarded return.
//
// ```
// fn main() {
// <|>if cond {
// foo();
// bar();
// }
// }
// ```
// ->
// ```
// fn main() {
// if !cond {
// return;
// }
// foo();
// bar();
// }
// ```
pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
let expr = if_expr.condition()?.expr()?; let expr = if_expr.condition()?.expr()?;
let then_block = if_expr.then_branch()?.block()?; let then_block = if_expr.then_branch()?.block()?;
if if_expr.else_branch().is_some() { if if_expr.else_branch().is_some() {
@ -51,7 +50,7 @@ pub(crate) fn convert_to_guarded_return(mut ctx: AssistCtx<impl HirDatabase>) ->
} }
// check for early return and continue // check for early return and continue
let first_in_then_block = then_block.syntax().first_child()?.clone(); let first_in_then_block = then_block.syntax().first_child()?;
if ast::ReturnExpr::can_cast(first_in_then_block.kind()) if ast::ReturnExpr::can_cast(first_in_then_block.kind())
|| ast::ContinueExpr::can_cast(first_in_then_block.kind()) || ast::ContinueExpr::can_cast(first_in_then_block.kind())
|| first_in_then_block || first_in_then_block
@ -76,7 +75,7 @@ pub(crate) fn convert_to_guarded_return(mut ctx: AssistCtx<impl HirDatabase>) ->
then_block.syntax().last_child_or_token().filter(|t| t.kind() == R_CURLY)?; then_block.syntax().last_child_or_token().filter(|t| t.kind() == R_CURLY)?;
let cursor_position = ctx.frange.range.start(); let cursor_position = ctx.frange.range.start();
ctx.add_action(AssistId("convert_to_guarded_return"), "convert to guarded return", |edit| { ctx.add_assist(AssistId("convert_to_guarded_return"), "convert to guarded return", |edit| {
let if_indent_level = IndentLevel::from_node(&if_expr.syntax()); let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
let new_if_expr = let new_if_expr =
if_indent_level.increase_indent(make::if_expression(&expr, early_expression)); if_indent_level.increase_indent(make::if_expression(&expr, early_expression));
@ -106,8 +105,7 @@ pub(crate) fn convert_to_guarded_return(mut ctx: AssistCtx<impl HirDatabase>) ->
edit.target(if_expr.syntax().text_range()); edit.target(if_expr.syntax().text_range());
edit.replace_ast(parent_block, ast::Block::cast(new_block).unwrap()); edit.replace_ast(parent_block, ast::Block::cast(new_block).unwrap());
edit.set_cursor(cursor_position); edit.set_cursor(cursor_position);
}); })
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -7,8 +7,32 @@ use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: fill_match_arms
let match_expr = ctx.node_at_offset::<ast::MatchExpr>()?; //
// Adds missing clauses to a `match` expression.
//
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// <|>
// }
// }
// ```
// ->
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } => (),
// Action::Stop => (),
// }
// }
// ```
pub(crate) fn fill_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?; let match_arm_list = match_expr.match_arm_list()?;
// We already have some match arms, so we don't provide any assists. // We already have some match arms, so we don't provide any assists.
@ -29,7 +53,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
}; };
let variant_list = enum_def.variant_list()?; let variant_list = enum_def.variant_list()?;
ctx.add_action(AssistId("fill_match_arms"), "fill match arms", |edit| { ctx.add_assist(AssistId("fill_match_arms"), "fill match arms", |edit| {
let indent_level = IndentLevel::from_node(match_arm_list.syntax()); let indent_level = IndentLevel::from_node(match_arm_list.syntax());
let new_arm_list = { let new_arm_list = {
@ -43,9 +67,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
edit.target(match_expr.syntax().text_range()); edit.target(match_expr.syntax().text_range());
edit.set_cursor(expr.syntax().text_range().start()); edit.set_cursor(expr.syntax().text_range().start());
edit.replace_ast(match_arm_list, new_arm_list); edit.replace_ast(match_arm_list, new_arm_list);
}); })
ctx.build()
} }
fn is_trivial(arm: &ast::MatchArm) -> bool { fn is_trivial(arm: &ast::MatchArm) -> bool {
@ -130,7 +152,7 @@ mod tests {
A::Bs => (), A::Bs => (),
A::Cs(_) => (), A::Cs(_) => (),
A::Ds(_, _) => (), A::Ds(_, _) => (),
A::Es{ x, y } => (), A::Es { x, y } => (),
} }
} }
"#, "#,
@ -183,7 +205,7 @@ mod tests {
fn foo(a: &mut A) { fn foo(a: &mut A) {
match <|>a { match <|>a {
A::Es{ x, y } => (), A::Es { x, y } => (),
} }
} }
"#, "#,

View file

@ -1,13 +1,25 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, BinExpr, BinOp}; use ra_syntax::ast::{AstNode, BinExpr, BinOp};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
/// Flip binary expression assist. // Assist: flip_binexpr
pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { //
let expr = ctx.node_at_offset::<BinExpr>()?; // Flips operands of a binary expression.
//
// ```
// fn main() {
// let _ = 90 +<|> 2;
// }
// ```
// ->
// ```
// fn main() {
// let _ = 2 + 90;
// }
// ```
pub(crate) fn flip_binexpr(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let expr = ctx.find_node_at_offset::<BinExpr>()?;
let lhs = expr.lhs()?.syntax().clone(); let lhs = expr.lhs()?.syntax().clone();
let rhs = expr.rhs()?.syntax().clone(); let rhs = expr.rhs()?.syntax().clone();
let op_range = expr.op_token()?.text_range(); let op_range = expr.op_token()?.text_range();
@ -22,16 +34,14 @@ pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assis
return None; return None;
} }
ctx.add_action(AssistId("flip_binexpr"), "flip binary expression", |edit| { ctx.add_assist(AssistId("flip_binexpr"), "flip binary expression", |edit| {
edit.target(op_range); edit.target(op_range);
if let FlipAction::FlipAndReplaceOp(new_op) = action { if let FlipAction::FlipAndReplaceOp(new_op) = action {
edit.replace(op_range, new_op); edit.replace(op_range, new_op);
} }
edit.replace(lhs.text_range(), rhs.text()); edit.replace(lhs.text_range(), rhs.text());
edit.replace(rhs.text_range(), lhs.text()); edit.replace(rhs.text_range(), lhs.text());
}); })
ctx.build()
} }
enum FlipAction { enum FlipAction {

View file

@ -1,12 +1,25 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{algo::non_trivia_sibling, Direction, T}; use ra_syntax::{algo::non_trivia_sibling, Direction, T};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: flip_comma
let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; //
// Flips two comma-separated items.
//
// ```
// fn main() {
// ((1, 2),<|> (3, 4));
// }
// ```
// ->
// ```
// fn main() {
// ((3, 4), (1, 2));
// }
// ```
pub(crate) fn flip_comma(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let comma = ctx.find_token_at_offset(T![,])?;
let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
@ -16,13 +29,11 @@ pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
return None; return None;
} }
ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { ctx.add_assist(AssistId("flip_comma"), "flip comma", |edit| {
edit.target(comma.text_range()); edit.target(comma.text_range());
edit.replace(prev.text_range(), next.to_string()); edit.replace(prev.text_range(), next.to_string());
edit.replace(next.text_range(), prev.to_string()); edit.replace(next.text_range(), prev.to_string());
}); })
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -0,0 +1,117 @@
use hir::db::HirDatabase;
use ra_syntax::{
algo::non_trivia_sibling,
ast::{self, AstNode},
Direction, T,
};
use crate::{Assist, AssistCtx, AssistId};
// Assist: flip_trait_bound
//
// Flips two trait bounds.
//
// ```
// fn foo<T: Clone +<|> Copy>() { }
// ```
// ->
// ```
// fn foo<T: Copy + Clone>() { }
// ```
// Swap the two trait bounds adjacent to the `+` under the cursor.
pub(crate) fn flip_trait_bound(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
    // Mirror `flip_binexpr`: the assist is only offered while the cursor
    // sits directly on a `+`.
    let plus = ctx.find_token_at_offset(T![+])?;

    // The `+` must separate entries of a `TypeBoundList`, not be an
    // arbitrary binary operator.
    ast::TypeBoundList::cast(plus.parent())?;

    let lhs = non_trivia_sibling(plus.clone().into(), Direction::Prev)?;
    let rhs = non_trivia_sibling(plus.clone().into(), Direction::Next)?;

    ctx.add_assist(AssistId("flip_trait_bound"), "flip trait bound", |edit| {
        edit.target(plus.text_range());
        // Exchange the textual contents of the two neighbouring bounds.
        edit.replace(lhs.text_range(), rhs.to_string());
        edit.replace(rhs.text_range(), lhs.to_string());
    })
}
// Tests for `flip_trait_bound`. The `<|>` marker in fixtures denotes the
// cursor position; `check_assist` compares the post-assist text exactly.
#[cfg(test)]
mod tests {
    use super::*;

    use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};

    // The assist should report the `+` token itself as its target range.
    #[test]
    fn flip_trait_bound_assist_available() {
        check_assist_target(flip_trait_bound, "struct S<T> where T: A <|>+ B + C { }", "+")
    }

    // With a single bound there is no `+` token, so nothing is offered.
    #[test]
    fn flip_trait_bound_not_applicable_for_single_trait_bound() {
        check_assist_not_applicable(flip_trait_bound, "struct S<T> where T: <|>A { }")
    }

    #[test]
    fn flip_trait_bound_works_for_struct() {
        check_assist(
            flip_trait_bound,
            "struct S<T> where T: A <|>+ B { }",
            "struct S<T> where T: B <|>+ A { }",
        )
    }

    #[test]
    fn flip_trait_bound_works_for_trait_impl() {
        check_assist(
            flip_trait_bound,
            "impl X for S<T> where T: A +<|> B { }",
            "impl X for S<T> where T: B +<|> A { }",
        )
    }

    #[test]
    fn flip_trait_bound_works_for_fn() {
        check_assist(flip_trait_bound, "fn f<T: A <|>+ B>(t: T) { }", "fn f<T: B <|>+ A>(t: T) { }")
    }

    #[test]
    fn flip_trait_bound_works_for_fn_where_clause() {
        check_assist(
            flip_trait_bound,
            "fn f<T>(t: T) where T: A +<|> B { }",
            "fn f<T>(t: T) where T: B +<|> A { }",
        )
    }

    // Lifetimes participate in bound lists just like trait bounds do.
    #[test]
    fn flip_trait_bound_works_for_lifetime() {
        check_assist(
            flip_trait_bound,
            "fn f<T>(t: T) where T: A <|>+ 'static { }",
            "fn f<T>(t: T) where T: 'static <|>+ A { }",
        )
    }

    // Generic arguments and path-qualified bounds are swapped verbatim.
    #[test]
    fn flip_trait_bound_works_for_complex_bounds() {
        check_assist(
            flip_trait_bound,
            "struct S<T> where T: A<T> <|>+ b_mod::B<T> + C<T> { }",
            "struct S<T> where T: b_mod::B<T> <|>+ A<T> + C<T> { }",
        )
    }

    // Only the two bounds adjacent to the cursor's `+` are exchanged; the
    // rest of a long list is left untouched.
    #[test]
    fn flip_trait_bound_works_for_long_bounds() {
        check_assist(
            flip_trait_bound,
            "struct S<T> where T: A + B + C + D + E + F +<|> G + H + I + J { }",
            "struct S<T> where T: A + B + C + D + E + G +<|> F + H + I + J { }",
        )
    }
}

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken},
@ -9,8 +7,24 @@ use ra_syntax::{
use crate::assist_ctx::AssistBuilder; use crate::assist_ctx::AssistBuilder;
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: inline_local_variable
let let_stmt = ctx.node_at_offset::<ast::LetStmt>()?; //
// Inlines local variable.
//
// ```
// fn main() {
// let x<|> = 1 + 2;
// x * 4;
// }
// ```
// ->
// ```
// fn main() {
// (1 + 2) * 4;
// }
// ```
pub(crate) fn inline_local_varialbe(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?;
let bind_pat = match let_stmt.pat()? { let bind_pat = match let_stmt.pat()? {
ast::Pat::BindPat(pat) => pat, ast::Pat::BindPat(pat) => pat,
_ => return None, _ => return None,
@ -37,10 +51,8 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
let mut wrap_in_parens = vec![true; refs.len()]; let mut wrap_in_parens = vec![true; refs.len()];
for (i, desc) in refs.iter().enumerate() { for (i, desc) in refs.iter().enumerate() {
let usage_node = ctx let usage_node =
.covering_node_for_range(desc.range) ctx.covering_node_for_range(desc.range).ancestors().find_map(ast::PathExpr::cast)?;
.ancestors()
.find_map(|node| ast::PathExpr::cast(node))?;
let usage_parent_option = usage_node.syntax().parent().and_then(ast::Expr::cast); let usage_parent_option = usage_node.syntax().parent().and_then(ast::Expr::cast);
let usage_parent = match usage_parent_option { let usage_parent = match usage_parent_option {
Some(u) => u, Some(u) => u,
@ -79,7 +91,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
let init_str = initializer_expr.syntax().text().to_string(); let init_str = initializer_expr.syntax().text().to_string();
let init_in_paren = format!("({})", &init_str); let init_in_paren = format!("({})", &init_str);
ctx.add_action( ctx.add_assist(
AssistId("inline_local_variable"), AssistId("inline_local_variable"),
"inline local variable", "inline local variable",
move |edit: &mut AssistBuilder| { move |edit: &mut AssistBuilder| {
@ -93,9 +105,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
} }
edit.set_cursor(delete_range.start()) edit.set_cursor(delete_range.start())
}, },
); )
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use format_buf::format; use format_buf::format;
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
@ -14,7 +12,23 @@ use test_utils::tested_by;
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: introduce_variable
//
// Extracts subexpression into a variable.
//
// ```
// fn main() {
// <|>(1 + 2)<|> * 4;
// }
// ```
// ->
// ```
// fn main() {
// let var_name = (1 + 2);
// var_name * 4;
// }
// ```
pub(crate) fn introduce_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
if ctx.frange.range.is_empty() { if ctx.frange.range.is_empty() {
return None; return None;
} }
@ -29,7 +43,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
if indent.kind() != WHITESPACE { if indent.kind() != WHITESPACE {
return None; return None;
} }
ctx.add_action(AssistId("introduce_variable"), "introduce variable", move |edit| { ctx.add_assist(AssistId("introduce_variable"), "introduce variable", move |edit| {
let mut buf = String::new(); let mut buf = String::new();
let cursor_offset = if wrap_in_block { let cursor_offset = if wrap_in_block {
@ -74,9 +88,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
} }
} }
edit.set_cursor(anchor_stmt.text_range().start() + cursor_offset); edit.set_cursor(anchor_stmt.text_range().start() + cursor_offset);
}); })
ctx.build()
} }
/// Check whether the node is a valid expression which can be extracted to a variable. /// Check whether the node is a valid expression which can be extracted to a variable.

View file

@ -1,11 +1,33 @@
//! FIXME: write short doc here
use crate::{Assist, AssistCtx, AssistId, TextRange, TextUnit}; use crate::{Assist, AssistCtx, AssistId, TextRange, TextUnit};
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, MatchArm}; use ra_syntax::ast::{AstNode, MatchArm};
pub(crate) fn merge_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: merge_match_arms
let current_arm = ctx.node_at_offset::<MatchArm>()?; //
// Merges identical match arms.
//
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// <|>Action::Move(..) => foo(),
// Action::Stop => foo(),
// }
// }
// ```
// ->
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move(..) | Action::Stop => foo(),
// }
// }
// ```
pub(crate) fn merge_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let current_arm = ctx.find_node_at_offset::<MatchArm>()?;
// We check if the following match arm matches this one. We could, but don't, // We check if the following match arm matches this one. We could, but don't,
// compare to the previous match arm as well. // compare to the previous match arm as well.
@ -30,7 +52,7 @@ pub(crate) fn merge_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<A
let cursor_to_end = current_arm.syntax().text_range().end() - ctx.frange.range.start(); let cursor_to_end = current_arm.syntax().text_range().end() - ctx.frange.range.start();
ctx.add_action(AssistId("merge_match_arms"), "merge match arms", |edit| { ctx.add_assist(AssistId("merge_match_arms"), "merge match arms", |edit| {
fn contains_placeholder(a: &MatchArm) -> bool { fn contains_placeholder(a: &MatchArm) -> bool {
a.pats().any(|x| match x { a.pats().any(|x| match x {
ra_syntax::ast::Pat::PlaceholderPat(..) => true, ra_syntax::ast::Pat::PlaceholderPat(..) => true,
@ -58,9 +80,7 @@ pub(crate) fn merge_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<A
edit.target(current_arm.syntax().text_range()); edit.target(current_arm.syntax().text_range());
edit.replace(TextRange::from_to(start, end), arm); edit.replace(TextRange::from_to(start, end), arm);
edit.set_cursor(start + offset); edit.set_cursor(start + offset);
}); })
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner}, ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner},
@ -9,8 +7,23 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: move_bounds_to_where_clause
let type_param_list = ctx.node_at_offset::<ast::TypeParamList>()?; //
// Moves inline type bounds to a where clause.
//
// ```
// fn apply<T, U, <|>F: FnOnce(T) -> U>(f: F, x: T) -> U {
// f(x)
// }
// ```
// ->
// ```
// fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
// f(x)
// }
// ```
pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?;
let mut type_params = type_param_list.type_params(); let mut type_params = type_param_list.type_params();
if type_params.all(|p| p.type_bound_list().is_none()) { if type_params.all(|p| p.type_bound_list().is_none()) {
@ -33,38 +46,30 @@ pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>)
_ => return None, _ => return None,
}; };
ctx.add_action( ctx.add_assist(AssistId("move_bounds_to_where_clause"), "move_bounds_to_where_clause", |edit| {
AssistId("move_bounds_to_where_clause"), let new_params = type_param_list
"move_bounds_to_where_clause", .type_params()
|edit| { .filter(|it| it.type_bound_list().is_some())
let new_params = type_param_list .map(|type_param| {
.type_params() let without_bounds = type_param.remove_bounds();
.filter(|it| it.type_bound_list().is_some()) (type_param, without_bounds)
.map(|type_param| { });
let without_bounds = type_param.remove_bounds();
(type_param, without_bounds)
});
let new_type_param_list = edit::replace_descendants(&type_param_list, new_params); let new_type_param_list = edit::replace_descendants(&type_param_list, new_params);
edit.replace_ast(type_param_list.clone(), new_type_param_list); edit.replace_ast(type_param_list.clone(), new_type_param_list);
let where_clause = { let where_clause = {
let predicates = type_param_list.type_params().filter_map(build_predicate); let predicates = type_param_list.type_params().filter_map(build_predicate);
make::where_clause(predicates) make::where_clause(predicates)
}; };
let to_insert = match anchor.prev_sibling_or_token() { let to_insert = match anchor.prev_sibling_or_token() {
Some(ref elem) if elem.kind() == WHITESPACE => { Some(ref elem) if elem.kind() == WHITESPACE => format!("{} ", where_clause.syntax()),
format!("{} ", where_clause.syntax()) _ => format!(" {}", where_clause.syntax()),
} };
_ => format!(" {}", where_clause.syntax()), edit.insert(anchor.text_range().start(), to_insert);
}; edit.target(type_param_list.syntax().text_range());
edit.insert(anchor.text_range().start(), to_insert); })
edit.target(type_param_list.syntax().text_range());
},
);
ctx.build()
} }
fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> { fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast, ast,
@ -9,8 +7,33 @@ use ra_syntax::{
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: move_guard_to_arm_body
let match_arm = ctx.node_at_offset::<MatchArm>()?; //
// Moves match guard into match arm body.
//
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } <|>if distance > 10 => foo(),
// _ => (),
// }
// }
// ```
// ->
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } => if distance > 10 { foo() },
// _ => (),
// }
// }
// ```
pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let match_arm = ctx.find_node_at_offset::<MatchArm>()?;
let guard = match_arm.guard()?; let guard = match_arm.guard()?;
let space_before_guard = guard.syntax().prev_sibling_or_token(); let space_before_guard = guard.syntax().prev_sibling_or_token();
@ -18,7 +41,7 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
let arm_expr = match_arm.expr()?; let arm_expr = match_arm.expr()?;
let buf = format!("if {} {{ {} }}", guard_conditions.syntax().text(), arm_expr.syntax().text()); let buf = format!("if {} {{ {} }}", guard_conditions.syntax().text(), arm_expr.syntax().text());
ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { ctx.add_assist(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
edit.target(guard.syntax().text_range()); edit.target(guard.syntax().text_range());
let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) { let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) {
Some(tok) => { Some(tok) => {
@ -38,12 +61,36 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
edit.set_cursor( edit.set_cursor(
arm_expr.syntax().text_range().start() + TextUnit::from(3) - offseting_amount, arm_expr.syntax().text_range().start() + TextUnit::from(3) - offseting_amount,
); );
}); })
ctx.build()
} }
pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: move_arm_cond_to_match_guard
let match_arm: MatchArm = ctx.node_at_offset::<MatchArm>()?; //
// Moves if expression from match arm body into a guard.
//
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } => <|>if distance > 10 { foo() },
// _ => (),
// }
// }
// ```
// ->
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } if distance > 10 => foo(),
// _ => (),
// }
// }
// ```
pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
let last_match_pat = match_arm.pats().last()?; let last_match_pat = match_arm.pats().last()?;
let arm_body = match_arm.expr()?; let arm_body = match_arm.expr()?;
@ -62,7 +109,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>)
let buf = format!(" if {}", cond.syntax().text()); let buf = format!(" if {}", cond.syntax().text());
ctx.add_action( ctx.add_assist(
AssistId("move_arm_cond_to_match_guard"), AssistId("move_arm_cond_to_match_guard"),
"move condition to match guard", "move condition to match guard",
|edit| { |edit| {
@ -79,8 +126,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>)
edit.insert(last_match_pat.syntax().text_range().end(), buf); edit.insert(last_match_pat.syntax().text_range().end(), buf);
edit.set_cursor(last_match_pat.syntax().text_range().end() + TextUnit::from(1)); edit.set_cursor(last_match_pat.syntax().text_range().end() + TextUnit::from(1));
}, },
); )
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -1,17 +1,29 @@
//! FIXME: write short doc here
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ast::AstNode, ast::Literal, TextRange, TextUnit}; use ra_syntax::{
SyntaxKind::{RAW_STRING, STRING},
TextRange, TextUnit,
};
use rustc_lexer; use rustc_lexer;
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn make_raw_string(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: make_raw_string
let literal = ctx.node_at_offset::<Literal>()?; //
if literal.token().kind() != ra_syntax::SyntaxKind::STRING { // Adds `r#` to a plain string literal.
return None; //
} // ```
let token = literal.token(); // fn main() {
// "Hello,<|> World!";
// }
// ```
// ->
// ```
// fn main() {
// r#"Hello, World!"#;
// }
// ```
pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let token = ctx.find_token_at_offset(STRING)?;
let text = token.text().as_str(); let text = token.text().as_str();
let usual_string_range = find_usual_string_range(text)?; let usual_string_range = find_usual_string_range(text)?;
let start_of_inside = usual_string_range.start().to_usize() + 1; let start_of_inside = usual_string_range.start().to_usize() + 1;
@ -29,19 +41,105 @@ pub(crate) fn make_raw_string(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
if error.is_err() { if error.is_err() {
return None; return None;
} }
ctx.add_action(AssistId("make_raw_string"), "make raw string", |edit| { ctx.add_assist(AssistId("make_raw_string"), "make raw string", |edit| {
edit.target(literal.syntax().text_range()); edit.target(token.text_range());
let max_hash_streak = count_hashes(&unescaped); let max_hash_streak = count_hashes(&unescaped);
let mut hashes = String::with_capacity(max_hash_streak + 1); let mut hashes = String::with_capacity(max_hash_streak + 1);
for _ in 0..hashes.capacity() { for _ in 0..hashes.capacity() {
hashes.push('#'); hashes.push('#');
} }
edit.replace( edit.replace(token.text_range(), format!("r{}\"{}\"{}", hashes, unescaped, hashes));
literal.syntax().text_range(), })
format!("r{}\"{}\"{}", hashes, unescaped, hashes), }
);
}); // Assist: make_usual_string
ctx.build() //
// Turns a raw string into a plain string.
//
// ```
// fn main() {
// r#"Hello,<|> "World!""#;
// }
// ```
// ->
// ```
// fn main() {
// "Hello, \"World!\"";
// }
// ```
pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?;
let text = token.text().as_str();
let usual_string_range = find_usual_string_range(text)?;
ctx.add_assist(AssistId("make_usual_string"), "make usual string", |edit| {
edit.target(token.text_range());
// parse inside string to escape `"`
let start_of_inside = usual_string_range.start().to_usize() + 1;
let end_of_inside = usual_string_range.end().to_usize();
let inside_str = &text[start_of_inside..end_of_inside];
let escaped = inside_str.escape_default().to_string();
edit.replace(token.text_range(), format!("\"{}\"", escaped));
})
}
// Assist: add_hash
//
// Adds a hash to a raw string literal.
//
// ```
// fn main() {
// r#"Hello,<|> World!"#;
// }
// ```
// ->
// ```
// fn main() {
// r##"Hello, World!"##;
// }
// ```
pub(crate) fn add_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?;
ctx.add_assist(AssistId("add_hash"), "add hash to raw string", |edit| {
edit.target(token.text_range());
edit.insert(token.text_range().start() + TextUnit::of_char('r'), "#");
edit.insert(token.text_range().end(), "#");
})
}
// Assist: remove_hash
//
// Removes a hash from a raw string literal.
//
// ```
// fn main() {
// r#"Hello,<|> World!"#;
// }
// ```
// ->
// ```
// fn main() {
// r"Hello, World!";
// }
// ```
pub(crate) fn remove_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?;
let text = token.text().as_str();
if text.starts_with("r\"") {
// no hash to remove
return None;
}
ctx.add_assist(AssistId("remove_hash"), "remove hash from raw string", |edit| {
edit.target(token.text_range());
let result = &text[2..text.len() - 1];
let result = if result.starts_with('\"') {
// no more hash, escape
let internal_str = &result[1..result.len() - 1];
format!("\"{}\"", internal_str.escape_default().to_string())
} else {
result.to_owned()
};
edit.replace(token.text_range(), format!("r{}", result));
})
} }
fn count_hashes(s: &str) -> usize { fn count_hashes(s: &str) -> usize {
@ -57,69 +155,17 @@ fn count_hashes(s: &str) -> usize {
} }
fn find_usual_string_range(s: &str) -> Option<TextRange> { fn find_usual_string_range(s: &str) -> Option<TextRange> {
Some(TextRange::from_to( let left_quote = s.find('"')?;
TextUnit::from(s.find('"')? as u32), let right_quote = s.rfind('"')?;
TextUnit::from(s.rfind('"')? as u32), if left_quote == right_quote {
)) // `s` only contains one quote
} None
} else {
pub(crate) fn make_usual_string(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { Some(TextRange::from_to(
let literal = ctx.node_at_offset::<Literal>()?; TextUnit::from(left_quote as u32),
if literal.token().kind() != ra_syntax::SyntaxKind::RAW_STRING { TextUnit::from(right_quote as u32),
return None; ))
} }
let token = literal.token();
let text = token.text().as_str();
let usual_string_range = find_usual_string_range(text)?;
ctx.add_action(AssistId("make_usual_string"), "make usual string", |edit| {
edit.target(literal.syntax().text_range());
// parse inside string to escape `"`
let start_of_inside = usual_string_range.start().to_usize() + 1;
let end_of_inside = usual_string_range.end().to_usize();
let inside_str = &text[start_of_inside..end_of_inside];
let escaped = inside_str.escape_default().to_string();
edit.replace(literal.syntax().text_range(), format!("\"{}\"", escaped));
});
ctx.build()
}
pub(crate) fn add_hash(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let literal = ctx.node_at_offset::<Literal>()?;
if literal.token().kind() != ra_syntax::SyntaxKind::RAW_STRING {
return None;
}
ctx.add_action(AssistId("add_hash"), "add hash to raw string", |edit| {
edit.target(literal.syntax().text_range());
edit.insert(literal.syntax().text_range().start() + TextUnit::of_char('r'), "#");
edit.insert(literal.syntax().text_range().end(), "#");
});
ctx.build()
}
pub(crate) fn remove_hash(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let literal = ctx.node_at_offset::<Literal>()?;
if literal.token().kind() != ra_syntax::SyntaxKind::RAW_STRING {
return None;
}
let token = literal.token();
let text = token.text().as_str();
if text.starts_with("r\"") {
// no hash to remove
return None;
}
ctx.add_action(AssistId("remove_hash"), "remove hash from raw string", |edit| {
edit.target(literal.syntax().text_range());
let result = &text[2..text.len() - 1];
let result = if result.starts_with("\"") {
// no more hash, escape
let internal_str = &result[1..result.len() - 1];
format!("\"{}\"", internal_str.escape_default().to_string())
} else {
result.to_owned()
};
edit.replace(literal.syntax().text_range(), format!("r{}", result));
});
ctx.build()
} }
#[cfg(test)] #[cfg(test)]
@ -158,6 +204,23 @@ string"#;
) )
} }
#[test]
fn make_raw_string_works_inside_macros() {
check_assist(
make_raw_string,
r#"
fn f() {
format!(<|>"x = {}", 92)
}
"#,
r##"
fn f() {
format!(<|>r#"x = {}"#, 92)
}
"##,
)
}
#[test] #[test]
fn make_raw_string_hashes_inside_works() { fn make_raw_string_hashes_inside_works() {
check_assist( check_assist(
@ -211,6 +274,30 @@ string"###;
) )
} }
#[test]
fn make_raw_string_not_works_on_partial_string() {
check_assist_not_applicable(
make_raw_string,
r#"
fn f() {
let s = "foo<|>
}
"#,
)
}
#[test]
fn make_usual_string_not_works_on_partial_string() {
check_assist_not_applicable(
make_usual_string,
r#"
fn main() {
let s = r#"bar<|>
}
"#,
)
}
#[test] #[test]
fn add_hash_target() { fn add_hash_target() {
check_assist_target( check_assist_target(

View file

@ -1,14 +1,28 @@
//! FIXME: write short doc here
use crate::{Assist, AssistCtx, AssistId};
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
TextUnit, T, TextUnit, T,
}; };
pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { use crate::{Assist, AssistCtx, AssistId};
let macro_call = ctx.node_at_offset::<ast::MacroCall>()?;
// Assist: remove_dbg
//
// Removes `dbg!()` macro call.
//
// ```
// fn main() {
// <|>dbg!(92);
// }
// ```
// ->
// ```
// fn main() {
// 92;
// }
// ```
pub(crate) fn remove_dbg(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
if !is_valid_macrocall(&macro_call, "dbg")? { if !is_valid_macrocall(&macro_call, "dbg")? {
return None; return None;
@ -44,13 +58,11 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
text.slice(without_parens).to_string() text.slice(without_parens).to_string()
}; };
ctx.add_action(AssistId("remove_dbg"), "remove dbg!()", |edit| { ctx.add_assist(AssistId("remove_dbg"), "remove dbg!()", |edit| {
edit.target(macro_call.syntax().text_range()); edit.target(macro_call.syntax().text_range());
edit.replace(macro_range, macro_content); edit.replace(macro_range, macro_content);
edit.set_cursor(cursor_pos); edit.set_cursor(cursor_pos);
}); })
ctx.build()
} }
/// Verifies that the given macro_call actually matches the given name /// Verifies that the given macro_call actually matches the given name

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use format_buf::format; use format_buf::format;
use hir::db::HirDatabase; use hir::db::HirDatabase;
use ra_fmt::extract_trivial_expression; use ra_fmt::extract_trivial_expression;
@ -7,8 +5,34 @@ use ra_syntax::{ast, AstNode};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: replace_if_let_with_match
let if_expr: ast::IfExpr = ctx.node_at_offset()?; //
// Replaces `if let` with an else branch with a `match` expression.
//
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// <|>if let Action::Move { distance } = action {
// foo(distance)
// } else {
// bar()
// }
// }
// ```
// ->
// ```
// enum Action { Move { distance: u32 }, Stop }
//
// fn handle(action: Action) {
// match action {
// Action::Move { distance } => foo(distance),
// _ => bar(),
// }
// }
// ```
pub(crate) fn replace_if_let_with_match(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
let cond = if_expr.condition()?; let cond = if_expr.condition()?;
let pat = cond.pat()?; let pat = cond.pat()?;
let expr = cond.expr()?; let expr = cond.expr()?;
@ -18,14 +42,12 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) ->
ast::ElseBranch::IfExpr(_) => return None, ast::ElseBranch::IfExpr(_) => return None,
}; };
ctx.add_action(AssistId("replace_if_let_with_match"), "replace with match", |edit| { ctx.add_assist(AssistId("replace_if_let_with_match"), "replace with match", |edit| {
let match_expr = build_match_expr(expr, pat, then_block, else_block); let match_expr = build_match_expr(expr, pat, then_block, else_block);
edit.target(if_expr.syntax().text_range()); edit.target(if_expr.syntax().text_range());
edit.replace_node_and_indent(if_expr.syntax(), match_expr); edit.replace_node_and_indent(if_expr.syntax(), match_expr);
edit.set_cursor(if_expr.syntax().text_range().start()) edit.set_cursor(if_expr.syntax().text_range().start())
}); })
ctx.build()
} }
fn build_match_expr( fn build_match_expr(

View file

@ -1,5 +1,3 @@
//! FIXME: write short doc here
use std::iter::successors; use std::iter::successors;
use hir::db::HirDatabase; use hir::db::HirDatabase;
@ -7,8 +5,19 @@ use ra_syntax::{ast, AstNode, TextUnit, T};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { // Assist: split_import
let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == T![::])?; //
// Wraps the tail of import into braces.
//
// ```
// use std::<|>collections::HashMap;
// ```
// ->
// ```
// use std::{collections::HashMap};
// ```
pub(crate) fn split_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let colon_colon = ctx.find_token_at_offset(T![::])?;
let path = ast::Path::cast(colon_colon.parent())?; let path = ast::Path::cast(colon_colon.parent())?;
let top_path = successors(Some(path), |it| it.parent_path()).last()?; let top_path = successors(Some(path), |it| it.parent_path()).last()?;
@ -23,14 +32,12 @@ pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assis
None => top_path.syntax().text_range().end(), None => top_path.syntax().text_range().end(),
}; };
ctx.add_action(AssistId("split_import"), "split import", |edit| { ctx.add_assist(AssistId("split_import"), "split import", |edit| {
edit.target(colon_colon.text_range()); edit.target(colon_colon.text_range());
edit.insert(l_curly, "{"); edit.insert(l_curly, "{");
edit.insert(r_curly, "}"); edit.insert(r_curly, "}");
edit.set_cursor(l_curly + TextUnit::of_str("{")); edit.set_cursor(l_curly + TextUnit::of_str("{"));
}); })
ctx.build()
} }
#[cfg(test)] #[cfg(test)]

View file

@ -0,0 +1,34 @@
//! Each assist definition has a special comment, which specifies docs and
//! example.
//!
//! We collect all the example and write the as tests in this module.
mod generated;
use hir::mock::MockDatabase;
use ra_db::FileRange;
use test_utils::{assert_eq_text, extract_range_or_offset};
fn check(assist_id: &str, before: &str, after: &str) {
let (selection, before) = extract_range_or_offset(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
let frange = FileRange { file_id, range: selection.into() };
let (_assist_id, action) = crate::assists(&db, frange)
.into_iter()
.find(|(id, _)| id.id.0 == assist_id)
.unwrap_or_else(|| {
panic!(
"\n\nAssist is not applicable: {}\nAvailable assists: {}",
assist_id,
crate::assists(&db, frange)
.into_iter()
.map(|(id, _)| id.id.0)
.collect::<Vec<_>>()
.join(", ")
)
});
let actual = action.edit.apply(&before);
assert_eq_text!(after, &actual);
}

View file

@ -0,0 +1,526 @@
//! Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`
use super::check;
#[test]
fn doctest_add_derive() {
check(
"add_derive",
r#####"
struct Point {
x: u32,
y: u32,<|>
}
"#####,
r#####"
#[derive()]
struct Point {
x: u32,
y: u32,
}
"#####,
)
}
#[test]
fn doctest_add_explicit_type() {
check(
"add_explicit_type",
r#####"
fn main() {
let x<|> = 92;
}
"#####,
r#####"
fn main() {
let x: i32 = 92;
}
"#####,
)
}
#[test]
fn doctest_add_hash() {
check(
"add_hash",
r#####"
fn main() {
r#"Hello,<|> World!"#;
}
"#####,
r#####"
fn main() {
r##"Hello, World!"##;
}
"#####,
)
}
#[test]
fn doctest_add_impl() {
check(
"add_impl",
r#####"
struct Ctx<T: Clone> {
data: T,<|>
}
"#####,
r#####"
struct Ctx<T: Clone> {
data: T,
}
impl<T: Clone> Ctx<T> {
}
"#####,
)
}
#[test]
fn doctest_add_impl_default_members() {
check(
"add_impl_default_members",
r#####"
trait T {
Type X;
fn foo(&self);
fn bar(&self) {}
}
impl T for () {
Type X = ();
fn foo(&self) {}<|>
}
"#####,
r#####"
trait T {
Type X;
fn foo(&self);
fn bar(&self) {}
}
impl T for () {
Type X = ();
fn foo(&self) {}
fn bar(&self) {}
}
"#####,
)
}
#[test]
fn doctest_add_impl_missing_members() {
check(
"add_impl_missing_members",
r#####"
trait T {
Type X;
fn foo(&self);
fn bar(&self) {}
}
impl T for () {<|>
}
"#####,
r#####"
trait T {
Type X;
fn foo(&self);
fn bar(&self) {}
}
impl T for () {
fn foo(&self) { unimplemented!() }
}
"#####,
)
}
#[test]
fn doctest_add_import() {
check(
"add_import",
r#####"
fn process(map: std::collections::<|>HashMap<String, String>) {}
"#####,
r#####"
use std::collections::HashMap;
fn process(map: HashMap<String, String>) {}
"#####,
)
}
#[test]
fn doctest_apply_demorgan() {
check(
"apply_demorgan",
r#####"
fn main() {
if x != 4 ||<|> !y {}
}
"#####,
r#####"
fn main() {
if !(x == 4 && y) {}
}
"#####,
)
}
#[test]
fn doctest_change_visibility() {
check(
"change_visibility",
r#####"
<|>fn frobnicate() {}
"#####,
r#####"
pub(crate) fn frobnicate() {}
"#####,
)
}
#[test]
fn doctest_convert_to_guarded_return() {
check(
"convert_to_guarded_return",
r#####"
fn main() {
<|>if cond {
foo();
bar();
}
}
"#####,
r#####"
fn main() {
if !cond {
return;
}
foo();
bar();
}
"#####,
)
}
#[test]
fn doctest_fill_match_arms() {
check(
"fill_match_arms",
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
<|>
}
}
"#####,
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } => (),
Action::Stop => (),
}
}
"#####,
)
}
#[test]
fn doctest_flip_binexpr() {
check(
"flip_binexpr",
r#####"
fn main() {
let _ = 90 +<|> 2;
}
"#####,
r#####"
fn main() {
let _ = 2 + 90;
}
"#####,
)
}
#[test]
fn doctest_flip_comma() {
check(
"flip_comma",
r#####"
fn main() {
((1, 2),<|> (3, 4));
}
"#####,
r#####"
fn main() {
((3, 4), (1, 2));
}
"#####,
)
}
#[test]
fn doctest_flip_trait_bound() {
check(
"flip_trait_bound",
r#####"
fn foo<T: Clone +<|> Copy>() { }
"#####,
r#####"
fn foo<T: Copy + Clone>() { }
"#####,
)
}
#[test]
fn doctest_inline_local_variable() {
check(
"inline_local_variable",
r#####"
fn main() {
let x<|> = 1 + 2;
x * 4;
}
"#####,
r#####"
fn main() {
(1 + 2) * 4;
}
"#####,
)
}
#[test]
fn doctest_introduce_variable() {
check(
"introduce_variable",
r#####"
fn main() {
<|>(1 + 2)<|> * 4;
}
"#####,
r#####"
fn main() {
let var_name = (1 + 2);
var_name * 4;
}
"#####,
)
}
#[test]
fn doctest_make_raw_string() {
check(
"make_raw_string",
r#####"
fn main() {
"Hello,<|> World!";
}
"#####,
r#####"
fn main() {
r#"Hello, World!"#;
}
"#####,
)
}
#[test]
fn doctest_make_usual_string() {
check(
"make_usual_string",
r#####"
fn main() {
r#"Hello,<|> "World!""#;
}
"#####,
r#####"
fn main() {
"Hello, \"World!\"";
}
"#####,
)
}
#[test]
fn doctest_merge_match_arms() {
check(
"merge_match_arms",
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
<|>Action::Move(..) => foo(),
Action::Stop => foo(),
}
}
"#####,
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move(..) | Action::Stop => foo(),
}
}
"#####,
)
}
#[test]
fn doctest_move_arm_cond_to_match_guard() {
check(
"move_arm_cond_to_match_guard",
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } => <|>if distance > 10 { foo() },
_ => (),
}
}
"#####,
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } if distance > 10 => foo(),
_ => (),
}
}
"#####,
)
}
#[test]
fn doctest_move_bounds_to_where_clause() {
check(
"move_bounds_to_where_clause",
r#####"
fn apply<T, U, <|>F: FnOnce(T) -> U>(f: F, x: T) -> U {
f(x)
}
"#####,
r#####"
fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
f(x)
}
"#####,
)
}
#[test]
fn doctest_move_guard_to_arm_body() {
check(
"move_guard_to_arm_body",
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } <|>if distance > 10 => foo(),
_ => (),
}
}
"#####,
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } => if distance > 10 { foo() },
_ => (),
}
}
"#####,
)
}
#[test]
fn doctest_remove_dbg() {
check(
"remove_dbg",
r#####"
fn main() {
<|>dbg!(92);
}
"#####,
r#####"
fn main() {
92;
}
"#####,
)
}
#[test]
fn doctest_remove_hash() {
check(
"remove_hash",
r#####"
fn main() {
r#"Hello,<|> World!"#;
}
"#####,
r#####"
fn main() {
r"Hello, World!";
}
"#####,
)
}
#[test]
fn doctest_replace_if_let_with_match() {
check(
"replace_if_let_with_match",
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
<|>if let Action::Move { distance } = action {
foo(distance)
} else {
bar()
}
}
"#####,
r#####"
enum Action { Move { distance: u32 }, Stop }
fn handle(action: Action) {
match action {
Action::Move { distance } => foo(distance),
_ => bar(),
}
}
"#####,
)
}
#[test]
fn doctest_split_import() {
check(
"split_import",
r#####"
use std::<|>collections::HashMap;
"#####,
r#####"
use std::{collections::HashMap};
"#####,
)
}

View file

@ -7,15 +7,16 @@
mod assist_ctx; mod assist_ctx;
mod marks; mod marks;
#[cfg(test)]
mod doc_tests;
use hir::db::HirDatabase; use hir::db::HirDatabase;
use itertools::Itertools;
use ra_db::FileRange; use ra_db::FileRange;
use ra_syntax::{TextRange, TextUnit}; use ra_syntax::{TextRange, TextUnit};
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
pub(crate) use crate::assist_ctx::{Assist, AssistCtx}; pub(crate) use crate::assist_ctx::{Assist, AssistCtx};
pub use crate::assists::auto_import::auto_import_text_edit; pub use crate::assists::add_import::auto_import_text_edit;
/// Unique identifier of the assist, should not be shown to the user /// Unique identifier of the assist, should not be shown to the user
/// directly. /// directly.
@ -36,7 +37,7 @@ pub struct AssistAction {
pub target: Option<TextRange>, pub target: Option<TextRange>,
} }
/// Return all the assists eapplicable at the given position. /// Return all the assists applicable at the given position.
/// ///
/// Assists are returned in the "unresolved" state, that is only labels are /// Assists are returned in the "unresolved" state, that is only labels are
/// returned, without actual edits. /// returned, without actual edits.
@ -49,10 +50,10 @@ where
.iter() .iter()
.filter_map(|f| f(ctx.clone())) .filter_map(|f| f(ctx.clone()))
.map(|a| match a { .map(|a| match a {
Assist::Unresolved(labels) => labels, Assist::Unresolved { label } => label,
Assist::Resolved(..) => unreachable!(), Assist::Resolved { .. } => unreachable!(),
}) })
.concat() .collect()
}) })
} }
@ -71,10 +72,10 @@ where
.iter() .iter()
.filter_map(|f| f(ctx.clone())) .filter_map(|f| f(ctx.clone()))
.map(|a| match a { .map(|a| match a {
Assist::Resolved(labels_actions) => labels_actions, Assist::Resolved { label, action } => (label, action),
Assist::Unresolved(..) => unreachable!(), Assist::Unresolved { .. } => unreachable!(),
}) })
.concat(); .collect::<Vec<_>>();
a.sort_by(|a, b| match (a.1.target, b.1.target) { a.sort_by(|a, b| match (a.1.target, b.1.target) {
(Some(a), Some(b)) => a.len().cmp(&b.len()), (Some(a), Some(b)) => a.len().cmp(&b.len()),
(Some(_), None) => Ordering::Less, (Some(_), None) => Ordering::Less,
@ -95,6 +96,7 @@ mod assists {
mod apply_demorgan; mod apply_demorgan;
mod flip_comma; mod flip_comma;
mod flip_binexpr; mod flip_binexpr;
mod flip_trait_bound;
mod change_visibility; mod change_visibility;
mod fill_match_arms; mod fill_match_arms;
mod merge_match_arms; mod merge_match_arms;
@ -104,7 +106,7 @@ mod assists {
mod replace_if_let_with_match; mod replace_if_let_with_match;
mod split_import; mod split_import;
mod remove_dbg; mod remove_dbg;
pub(crate) mod auto_import; pub(crate) mod add_import;
mod add_missing_impl_members; mod add_missing_impl_members;
mod move_guard; mod move_guard;
mod move_bounds; mod move_bounds;
@ -121,11 +123,12 @@ mod assists {
merge_match_arms::merge_match_arms, merge_match_arms::merge_match_arms,
flip_comma::flip_comma, flip_comma::flip_comma,
flip_binexpr::flip_binexpr, flip_binexpr::flip_binexpr,
flip_trait_bound::flip_trait_bound,
introduce_variable::introduce_variable, introduce_variable::introduce_variable,
replace_if_let_with_match::replace_if_let_with_match, replace_if_let_with_match::replace_if_let_with_match,
split_import::split_import, split_import::split_import,
remove_dbg::remove_dbg, remove_dbg::remove_dbg,
auto_import::auto_import, add_import::add_import,
add_missing_impl_members::add_missing_impl_members, add_missing_impl_members::add_missing_impl_members,
add_missing_impl_members::add_missing_default_members, add_missing_impl_members::add_missing_default_members,
inline_local_variable::inline_local_varialbe, inline_local_variable::inline_local_varialbe,
@ -154,39 +157,6 @@ mod helpers {
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>, assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str, before: &str,
after: &str, after: &str,
) {
check_assist_nth_action(assist, before, after, 0)
}
pub(crate) fn check_assist_range(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str,
after: &str,
) {
check_assist_range_nth_action(assist, before, after, 0)
}
pub(crate) fn check_assist_target(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str,
target: &str,
) {
check_assist_target_nth_action(assist, before, target, 0)
}
pub(crate) fn check_assist_range_target(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str,
target: &str,
) {
check_assist_range_target_nth_action(assist, before, target, 0)
}
pub(crate) fn check_assist_nth_action(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str,
after: &str,
index: usize,
) { ) {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
@ -194,12 +164,11 @@ mod helpers {
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
let labels_actions = match assist { let action = match assist {
Assist::Unresolved(_) => unreachable!(), Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved(labels_actions) => labels_actions, Assist::Resolved { action, .. } => action,
}; };
let (_, action) = labels_actions.get(index).expect("expect assist action at index");
let actual = action.edit.apply(&before); let actual = action.edit.apply(&before);
let actual_cursor_pos = match action.cursor_position { let actual_cursor_pos = match action.cursor_position {
None => action None => action
@ -212,23 +181,21 @@ mod helpers {
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }
pub(crate) fn check_assist_range_nth_action( pub(crate) fn check_assist_range(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>, assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str, before: &str,
after: &str, after: &str,
index: usize,
) { ) {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
let labels_actions = match assist { let action = match assist {
Assist::Unresolved(_) => unreachable!(), Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved(labels_actions) => labels_actions, Assist::Resolved { action, .. } => action,
}; };
let (_, action) = labels_actions.get(index).expect("expect assist action at index");
let mut actual = action.edit.apply(&before); let mut actual = action.edit.apply(&before);
if let Some(pos) = action.cursor_position { if let Some(pos) = action.cursor_position {
actual = add_cursor(&actual, pos); actual = add_cursor(&actual, pos);
@ -236,11 +203,10 @@ mod helpers {
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }
pub(crate) fn check_assist_target_nth_action( pub(crate) fn check_assist_target(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>, assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str, before: &str,
target: &str, target: &str,
index: usize,
) { ) {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
@ -248,33 +214,30 @@ mod helpers {
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
let labels_actions = match assist { let action = match assist {
Assist::Unresolved(_) => unreachable!(), Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved(labels_actions) => labels_actions, Assist::Resolved { action, .. } => action,
}; };
let (_, action) = labels_actions.get(index).expect("expect assist action at index");
let range = action.target.expect("expected target on action"); let range = action.target.expect("expected target on action");
assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
} }
pub(crate) fn check_assist_range_target_nth_action( pub(crate) fn check_assist_range_target(
assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>, assist: fn(AssistCtx<MockDatabase>) -> Option<Assist>,
before: &str, before: &str,
target: &str, target: &str,
index: usize,
) { ) {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before); let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
let labels_actions = match assist { let action = match assist {
Assist::Unresolved(_) => unreachable!(), Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved(labels_actions) => labels_actions, Assist::Resolved { action, .. } => action,
}; };
let (_, action) = labels_actions.get(index).expect("expect assist action at index");
let range = action.target.expect("expected target on action"); let range = action.target.expect("expected target on action");
assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
} }

View file

@ -134,10 +134,7 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
) -> Option<FileId> { ) -> Option<FileId> {
let path = { let path = {
let mut path = self.0.file_relative_path(anchor); let mut path = self.0.file_relative_path(anchor);
// Workaround for relative path API: turn `lib.rs` into ``. assert!(path.pop());
if !path.pop() {
path = RelativePathBuf::default();
}
path.push(relative_path); path.push(relative_path);
path.normalize() path.normalize()
}; };

View file

@ -19,12 +19,14 @@ ra_cfg = { path = "../ra_cfg" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
mbe = { path = "../ra_mbe", package = "ra_mbe" } mbe = { path = "../ra_mbe", package = "ra_mbe" }
tt = { path = "../ra_tt", package = "ra_tt" } tt = { path = "../ra_tt", package = "ra_tt" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }
ra_prof = { path = "../ra_prof" } ra_prof = { path = "../ra_prof" }
chalk-solve = { git = "https://github.com/rust-lang/chalk.git" } chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "8314f2fcec8582a58c24b638f1a259d4145a0809" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git" } chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "8314f2fcec8582a58c24b638f1a259d4145a0809" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git" } chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "8314f2fcec8582a58c24b638f1a259d4145a0809" }
lalrpop-intern = "0.15.1" lalrpop-intern = "0.15.1"
[dev-dependencies] [dev-dependencies]

View file

@ -3,13 +3,14 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::{type_ref::TypeRef, LocalEnumVariantId};
use hir_expand::name::AsName;
use ra_arena::{impl_arena_id, Arena, RawId}; use ra_arena::{impl_arena_id, Arena, RawId};
use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner}; use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
type_ref::TypeRef, Enum, EnumVariant, FieldSource, HasSource, Module, Name, Source, Struct, StructField,
AsName, Enum, EnumVariant, FieldSource, HasSource, Module, Name, Source, Struct, StructField,
}; };
impl Struct { impl Struct {
@ -67,7 +68,7 @@ impl EnumVariant {
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct EnumData { pub struct EnumData {
pub(crate) name: Option<Name>, pub(crate) name: Option<Name>,
pub(crate) variants: Arena<EnumVariantId, EnumVariantData>, pub(crate) variants: Arena<LocalEnumVariantId, EnumVariantData>,
} }
impl EnumData { impl EnumData {
@ -84,10 +85,6 @@ impl EnumData {
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct EnumVariantId(RawId);
impl_arena_id!(EnumVariantId);
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct EnumVariantData { pub(crate) struct EnumVariantData {
pub(crate) name: Option<Name>, pub(crate) name: Option<Name>,

View file

@ -5,11 +5,17 @@ pub(crate) mod docs;
use std::sync::Arc; use std::sync::Arc;
use ra_db::{CrateId, Edition, FileId}; use hir_def::{
builtin_type::BuiltinType,
type_ref::{Mutability, TypeRef},
CrateModuleId, LocalEnumVariantId, ModuleId,
};
use hir_expand::name::{self, AsName};
use ra_db::{CrateId, Edition};
use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
use crate::{ use crate::{
adt::{EnumVariantId, StructFieldId, VariantDef}, adt::{StructFieldId, VariantDef},
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
expr::{validation::ExprValidator, Body, BodySourceMap}, expr::{validation::ExprValidator, Body, BodySourceMap},
@ -19,20 +25,11 @@ use crate::{
TypeAliasId, TypeAliasId,
}, },
impl_block::ImplBlock, impl_block::ImplBlock,
name::{ nameres::{ImportId, ModuleScope, Namespace},
BOOL, CHAR, F32, F64, I128, I16, I32, I64, I8, ISIZE, SELF_TYPE, STR, U128, U16, U32, U64,
U8, USIZE,
},
nameres::{CrateModuleId, ImportId, ModuleScope, Namespace},
resolve::{Resolver, Scope, TypeNs}, resolve::{Resolver, Scope, TypeNs},
traits::TraitData, traits::TraitData,
ty::{ ty::{InferenceResult, TraitRef},
primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, Either, HasSource, Name, Ty,
InferenceResult, TraitRef,
},
type_ref::Mutability,
type_ref::TypeRef,
AsName, AstId, Either, HasSource, Name, Ty,
}; };
/// hir::Crate describes a single crate. It's the main interface with which /// hir::Crate describes a single crate. It's the main interface with which
@ -67,8 +64,7 @@ impl Crate {
pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> { pub fn root_module(self, db: &impl DefDatabase) -> Option<Module> {
let module_id = db.crate_def_map(self).root(); let module_id = db.crate_def_map(self).root();
let module = Module { krate: self, module_id }; Some(Module::new(self, module_id))
Some(module)
} }
pub fn edition(self, db: &impl DefDatabase) -> Edition { pub fn edition(self, db: &impl DefDatabase) -> Edition {
@ -83,43 +79,7 @@ impl Crate {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Module { pub struct Module {
pub(crate) krate: Crate, pub(crate) id: ModuleId,
pub(crate) module_id: CrateModuleId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinType {
Char,
Bool,
Str,
Int(IntTy),
Float(FloatTy),
}
impl BuiltinType {
#[rustfmt::skip]
pub(crate) const ALL: &'static [(Name, BuiltinType)] = &[
(CHAR, BuiltinType::Char),
(BOOL, BuiltinType::Bool),
(STR, BuiltinType::Str),
(ISIZE, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::Xsize })),
(I8, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::X8 })),
(I16, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::X16 })),
(I32, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::X32 })),
(I64, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::X64 })),
(I128, BuiltinType::Int(IntTy { signedness: Signedness::Signed, bitness: IntBitness::X128 })),
(USIZE, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize })),
(U8, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X8 })),
(U16, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X16 })),
(U32, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X32 })),
(U64, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X64 })),
(U128, BuiltinType::Int(IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 })),
(F32, BuiltinType::Float(FloatTy { bitness: FloatBitness::X32 })),
(F64, BuiltinType::Float(FloatTy { bitness: FloatBitness::X64 })),
];
} }
/// The defs which can be visible in the module. /// The defs which can be visible in the module.
@ -148,39 +108,19 @@ impl_froms!(
BuiltinType BuiltinType
); );
pub enum ModuleSource { pub use hir_def::ModuleSource;
SourceFile(ast::SourceFile),
Module(ast::Module),
}
impl ModuleSource {
pub(crate) fn new(
db: &(impl DefDatabase + AstDatabase),
file_id: Option<FileId>,
decl_id: Option<AstId<ast::Module>>,
) -> ModuleSource {
match (file_id, decl_id) {
(Some(file_id), _) => {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
(None, Some(item_id)) => {
let module = item_id.to_node(db);
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module)
}
(None, None) => panic!(),
}
}
}
impl Module { impl Module {
pub(crate) fn new(krate: Crate, crate_module_id: CrateModuleId) -> Module {
Module { id: ModuleId { krate: krate.crate_id, module_id: crate_module_id } }
}
/// Name of this module. /// Name of this module.
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let parent = def_map[self.module_id].parent?; let parent = def_map[self.id.module_id].parent?;
def_map[parent].children.iter().find_map(|(name, module_id)| { def_map[parent].children.iter().find_map(|(name, module_id)| {
if *module_id == self.module_id { if *module_id == self.id.module_id {
Some(name.clone()) Some(name.clone())
} else { } else {
None None
@ -200,29 +140,29 @@ impl Module {
} }
/// Returns the crate this module is part of. /// Returns the crate this module is part of.
pub fn krate(self, _db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self) -> Crate {
Some(self.krate) Crate { crate_id: self.id.krate }
} }
/// Topmost parent of this module. Every module has a `crate_root`, but some /// Topmost parent of this module. Every module has a `crate_root`, but some
/// might be missing `krate`. This can happen if a module's file is not included /// might be missing `krate`. This can happen if a module's file is not included
/// in the module tree of any target in `Cargo.toml`. /// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &impl DefDatabase) -> Module { pub fn crate_root(self, db: &impl DefDatabase) -> Module {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
self.with_module_id(def_map.root()) self.with_module_id(def_map.root())
} }
/// Finds a child module with the specified name. /// Finds a child module with the specified name.
pub fn child(self, db: &impl HirDatabase, name: &Name) -> Option<Module> { pub fn child(self, db: &impl HirDatabase, name: &Name) -> Option<Module> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let child_id = def_map[self.module_id].children.get(name)?; let child_id = def_map[self.id.module_id].children.get(name)?;
Some(self.with_module_id(*child_id)) Some(self.with_module_id(*child_id))
} }
/// Iterates over all child modules. /// Iterates over all child modules.
pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> { pub fn children(self, db: &impl DefDatabase) -> impl Iterator<Item = Module> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let children = def_map[self.module_id] let children = def_map[self.id.module_id]
.children .children
.iter() .iter()
.map(|(_, module_id)| self.with_module_id(*module_id)) .map(|(_, module_id)| self.with_module_id(*module_id))
@ -232,8 +172,8 @@ impl Module {
/// Finds a parent module. /// Finds a parent module.
pub fn parent(self, db: &impl DefDatabase) -> Option<Module> { pub fn parent(self, db: &impl DefDatabase) -> Option<Module> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let parent_id = def_map[self.module_id].parent?; let parent_id = def_map[self.id.module_id].parent?;
Some(self.with_module_id(parent_id)) Some(self.with_module_id(parent_id))
} }
@ -249,11 +189,11 @@ impl Module {
/// Returns a `ModuleScope`: a set of items, visible in this module. /// Returns a `ModuleScope`: a set of items, visible in this module.
pub fn scope(self, db: &impl HirDatabase) -> ModuleScope { pub fn scope(self, db: &impl HirDatabase) -> ModuleScope {
db.crate_def_map(self.krate)[self.module_id].scope.clone() db.crate_def_map(self.krate())[self.id.module_id].scope.clone()
} }
pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) { pub fn diagnostics(self, db: &impl HirDatabase, sink: &mut DiagnosticSink) {
db.crate_def_map(self.krate).add_diagnostics(db, self.module_id, sink); db.crate_def_map(self.krate()).add_diagnostics(db, self.id.module_id, sink);
for decl in self.declarations(db) { for decl in self.declarations(db) {
match decl { match decl {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink), crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
@ -277,13 +217,13 @@ impl Module {
} }
pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver { pub(crate) fn resolver(self, db: &impl DefDatabase) -> Resolver {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
Resolver::default().push_module_scope(def_map, self.module_id) Resolver::default().push_module_scope(def_map, self.id.module_id)
} }
pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> { pub fn declarations(self, db: &impl DefDatabase) -> Vec<ModuleDef> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
def_map[self.module_id] def_map[self.id.module_id]
.scope .scope
.entries() .entries()
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None }) .filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
@ -303,7 +243,7 @@ impl Module {
} }
fn with_module_id(self, module_id: CrateModuleId) -> Module { fn with_module_id(self, module_id: CrateModuleId) -> Module {
Module { module_id, krate: self.krate } Module::new(self.krate(), module_id)
} }
} }
@ -340,11 +280,11 @@ pub struct Struct {
impl Struct { impl Struct {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
@ -402,7 +342,7 @@ impl Union {
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &impl HirDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn ty(self, db: &impl HirDatabase) -> Ty { pub fn ty(self, db: &impl HirDatabase) -> Ty {
@ -428,11 +368,11 @@ pub struct Enum {
impl Enum { impl Enum {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
@ -470,7 +410,7 @@ impl Enum {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariant { pub struct EnumVariant {
pub(crate) parent: Enum, pub(crate) parent: Enum,
pub(crate) id: EnumVariantId, pub(crate) id: LocalEnumVariantId,
} }
impl EnumVariant { impl EnumVariant {
@ -523,12 +463,14 @@ impl Adt {
} }
pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> { pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
match self { Some(
Adt::Struct(s) => s.module(db), match self {
Adt::Union(s) => s.module(db), Adt::Struct(s) => s.module(db),
Adt::Enum(e) => e.module(db), Adt::Union(s) => s.module(db),
} Adt::Enum(e) => e.module(db),
.krate(db) }
.krate(),
)
} }
pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver { pub(crate) fn resolver(self, db: &impl HirDatabase) -> Resolver {
@ -643,7 +585,7 @@ impl FnData {
let self_type = if let Some(type_ref) = self_param.ascribed_type() { let self_type = if let Some(type_ref) = self_param.ascribed_type() {
TypeRef::from_ast(type_ref) TypeRef::from_ast(type_ref)
} else { } else {
let self_type = TypeRef::Path(SELF_TYPE.into()); let self_type = TypeRef::Path(name::SELF_TYPE.into());
match self_param.kind() { match self_param.kind() {
ast::SelfParamKind::Owned => self_type, ast::SelfParamKind::Owned => self_type,
ast::SelfParamKind::Ref => { ast::SelfParamKind::Ref => {
@ -692,11 +634,11 @@ impl FnData {
impl Function { impl Function {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
pub fn name(self, db: &impl HirDatabase) -> Name { pub fn name(self, db: &impl HirDatabase) -> Name {
@ -770,11 +712,11 @@ pub struct Const {
impl Const { impl Const {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> { pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> {
@ -867,11 +809,11 @@ pub struct Static {
impl Static { impl Static {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> { pub fn data(self, db: &impl HirDatabase) -> Arc<ConstData> {
@ -896,7 +838,7 @@ pub struct Trait {
impl Trait { impl Trait {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn name(self, db: &impl DefDatabase) -> Option<Name> { pub fn name(self, db: &impl DefDatabase) -> Option<Name> {
@ -917,9 +859,7 @@ impl Trait {
.where_predicates .where_predicates
.iter() .iter()
.filter_map(|pred| match &pred.type_ref { .filter_map(|pred| match &pred.type_ref {
TypeRef::Path(p) if p.as_ident() == Some(&crate::name::SELF_TYPE) => { TypeRef::Path(p) if p.as_ident() == Some(&name::SELF_TYPE) => pred.bound.as_path(),
pred.bound.as_path()
}
_ => None, _ => None,
}) })
.filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path) { .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path) {
@ -998,11 +938,11 @@ pub struct TypeAlias {
impl TypeAlias { impl TypeAlias {
pub fn module(self, db: &impl DefDatabase) -> Module { pub fn module(self, db: &impl DefDatabase) -> Module {
self.id.module(db) Module { id: self.id.module(db) }
} }
pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> { pub fn krate(self, db: &impl DefDatabase) -> Option<Crate> {
self.module(db).krate(db) Some(self.module(db).krate())
} }
/// The containing impl block, if this is a method. /// The containing impl block, if this is a method.

View file

@ -1,9 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_syntax::{ use ra_syntax::ast::{self, AstNode};
ast::{self, AstNode},
SyntaxNode,
};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
@ -12,34 +9,21 @@ use crate::{
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union, ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
}; };
#[derive(Debug, PartialEq, Eq, Clone, Copy)] pub use hir_def::Source;
pub struct Source<T> {
pub file_id: HirFileId,
pub ast: T,
}
pub trait HasSource { pub trait HasSource {
type Ast; type Ast;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>; fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>;
} }
impl<T> Source<T> {
pub(crate) fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
Source { file_id: self.file_id, ast: f(self.ast) }
}
pub(crate) fn file_syntax(&self, db: &impl AstDatabase) -> SyntaxNode {
db.parse_or_expand(self.file_id).expect("source created from invalid file")
}
}
/// NB: Module is !HasSource, because it has two source nodes at the same time: /// NB: Module is !HasSource, because it has two source nodes at the same time:
/// definition and declaration. /// definition and declaration.
impl Module { impl Module {
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
pub fn definition_source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ModuleSource> { pub fn definition_source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ModuleSource> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let decl_id = def_map[self.module_id].declaration; let decl_id = def_map[self.id.module_id].declaration;
let file_id = def_map[self.module_id].definition; let file_id = def_map[self.id.module_id].definition;
let ast = ModuleSource::new(db, file_id, decl_id); let ast = ModuleSource::new(db, file_id, decl_id);
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id()); let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
Source { file_id, ast } Source { file_id, ast }
@ -51,8 +35,8 @@ impl Module {
self, self,
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase + AstDatabase),
) -> Option<Source<ast::Module>> { ) -> Option<Source<ast::Module>> {
let def_map = db.crate_def_map(self.krate); let def_map = db.crate_def_map(self.krate());
let decl = def_map[self.module_id].declaration?; let decl = def_map[self.id.module_id].declaration?;
let ast = decl.to_node(db); let ast = decl.to_node(db);
Some(Source { file_id: decl.file_id(), ast }) Some(Source { file_id: decl.file_id(), ast })
} }

View file

@ -2,8 +2,8 @@
use std::sync::Arc; use std::sync::Arc;
use ra_db::{salsa, SourceDatabase}; use ra_db::salsa;
use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; use ra_syntax::SmolStr;
use crate::{ use crate::{
adt::{EnumData, StructData}, adt::{EnumData, StructData},
@ -12,81 +12,30 @@ use crate::{
ids, ids,
impl_block::{ImplBlock, ImplSourceMap, ModuleImplBlocks}, impl_block::{ImplBlock, ImplSourceMap, ModuleImplBlocks},
lang_item::{LangItemTarget, LangItems}, lang_item::{LangItemTarget, LangItems},
nameres::{CrateDefMap, ImportSourceMap, Namespace, RawItems}, nameres::{CrateDefMap, Namespace},
traits::TraitData, traits::TraitData,
ty::{ ty::{
method_resolution::CrateImplBlocks, traits::Impl, CallableDef, FnSig, GenericPredicate, method_resolution::CrateImplBlocks, traits::Impl, CallableDef, FnSig, GenericPredicate,
InferenceResult, Substs, Ty, TypableDef, TypeCtor, InferenceResult, Substs, Ty, TypableDef, TypeCtor,
}, },
type_alias::TypeAliasData, type_alias::TypeAliasData,
AstIdMap, Const, ConstData, Crate, DefWithBody, Enum, ErasedFileAstId, ExprScopes, FnData, Const, ConstData, Crate, DefWithBody, Enum, ExprScopes, FnData, Function, Module, Static,
Function, HirFileId, MacroCallLoc, MacroDefId, Module, Static, Struct, StructField, Trait, Struct, StructField, Trait, TypeAlias,
TypeAlias,
}; };
/// We store all interned things in the single QueryGroup. pub use hir_def::db::{
/// DefDatabase2, DefDatabase2Storage, InternDatabase, InternDatabaseStorage, RawItemsQuery,
/// This is done mainly to allow both "volatile" `AstDatabase` and "stable" RawItemsWithSourceMapQuery,
/// `DefDatabase` to access macros, without adding hard dependencies between the };
/// two. pub use hir_expand::db::{
#[salsa::query_group(InternDatabaseStorage)] AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
pub trait InternDatabase: SourceDatabase { ParseMacroQuery,
#[salsa::interned] };
fn intern_macro(&self, macro_call: MacroCallLoc) -> ids::MacroCallId;
#[salsa::interned]
fn intern_function(&self, loc: ids::ItemLoc<ast::FnDef>) -> ids::FunctionId;
#[salsa::interned]
fn intern_struct(&self, loc: ids::ItemLoc<ast::StructDef>) -> ids::StructId;
#[salsa::interned]
fn intern_enum(&self, loc: ids::ItemLoc<ast::EnumDef>) -> ids::EnumId;
#[salsa::interned]
fn intern_const(&self, loc: ids::ItemLoc<ast::ConstDef>) -> ids::ConstId;
#[salsa::interned]
fn intern_static(&self, loc: ids::ItemLoc<ast::StaticDef>) -> ids::StaticId;
#[salsa::interned]
fn intern_trait(&self, loc: ids::ItemLoc<ast::TraitDef>) -> ids::TraitId;
#[salsa::interned]
fn intern_type_alias(&self, loc: ids::ItemLoc<ast::TypeAliasDef>) -> ids::TypeAliasId;
// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> ids::TypeCtorId;
#[salsa::interned]
fn intern_impl(&self, impl_: Impl) -> ids::GlobalImplId;
}
/// This database has access to source code, so queries here are not really
/// incremental.
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: InternDatabase {
#[salsa::invoke(crate::source_id::AstIdMap::ast_id_map_query)]
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
#[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode;
#[salsa::transparent]
#[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)]
fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
#[salsa::invoke(crate::ids::HirFileId::parse_macro_query)]
fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>;
#[salsa::invoke(crate::ids::macro_def_query)]
fn macro_def(&self, macro_id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
#[salsa::invoke(crate::ids::macro_arg_query)]
fn macro_arg(&self, macro_call: ids::MacroCallId) -> Option<Arc<tt::Subtree>>;
#[salsa::invoke(crate::ids::macro_expand_query)]
fn macro_expand(&self, macro_call: ids::MacroCallId) -> Result<Arc<tt::Subtree>, String>;
}
// This database uses `AstDatabase` internally, // This database uses `AstDatabase` internally,
#[salsa::query_group(DefDatabaseStorage)] #[salsa::query_group(DefDatabaseStorage)]
#[salsa::requires(AstDatabase)] #[salsa::requires(AstDatabase)]
pub trait DefDatabase: InternDatabase + HirDebugDatabase { pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
#[salsa::invoke(crate::adt::StructData::struct_data_query)] #[salsa::invoke(crate::adt::StructData::struct_data_query)]
fn struct_data(&self, s: Struct) -> Arc<StructData>; fn struct_data(&self, s: Struct) -> Arc<StructData>;
@ -99,15 +48,6 @@ pub trait DefDatabase: InternDatabase + HirDebugDatabase {
#[salsa::invoke(crate::traits::TraitItemsIndex::trait_items_index)] #[salsa::invoke(crate::traits::TraitItemsIndex::trait_items_index)]
fn trait_items_index(&self, module: Module) -> crate::traits::TraitItemsIndex; fn trait_items_index(&self, module: Module) -> crate::traits::TraitItemsIndex;
#[salsa::invoke(RawItems::raw_items_with_source_map_query)]
fn raw_items_with_source_map(
&self,
file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>);
#[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
#[salsa::invoke(CrateDefMap::crate_def_map_query)] #[salsa::invoke(CrateDefMap::crate_def_map_query)]
fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>; fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>;
@ -202,6 +142,12 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
#[salsa::invoke(crate::ty::traits::trait_solver_query)] #[salsa::invoke(crate::ty::traits::trait_solver_query)]
fn trait_solver(&self, krate: Crate) -> crate::ty::traits::TraitSolver; fn trait_solver(&self, krate: Crate) -> crate::ty::traits::TraitSolver;
// Interned IDs for Chalk integration
#[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> ids::TypeCtorId;
#[salsa::interned]
fn intern_impl(&self, impl_: Impl) -> ids::GlobalImplId;
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)] #[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)]
fn associated_ty_data(&self, id: chalk_ir::TypeId) -> Arc<chalk_rust_ir::AssociatedTyDatum>; fn associated_ty_data(&self, id: chalk_ir::TypeId) -> Arc<chalk_rust_ir::AssociatedTyDatum>;

View file

@ -36,12 +36,6 @@ impl Module {
} }
} }
impl HirFileId {
pub fn debug(self, db: &impl HirDebugDatabase) -> impl fmt::Debug + '_ {
debug_fn(move |fmt| db.debug_hir_file_id(self, fmt))
}
}
pub trait HirDebugHelper: HirDatabase { pub trait HirDebugHelper: HirDatabase {
fn crate_name(&self, _krate: CrateId) -> Option<String> { fn crate_name(&self, _krate: CrateId) -> Option<String> {
None None

View file

@ -6,15 +6,17 @@ pub(crate) mod validation;
use std::{ops::Index, sync::Arc}; use std::{ops::Index, sync::Arc};
use hir_def::{
path::GenericArgs,
type_ref::{Mutability, TypeRef},
};
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{ast, AstPtr}; use ra_syntax::{ast, AstPtr};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
path::GenericArgs,
ty::primitive::{UncertainFloatTy, UncertainIntTy}, ty::primitive::{UncertainFloatTy, UncertainIntTy},
type_ref::{Mutability, TypeRef},
DefWithBody, Either, HasSource, Name, Path, Resolver, Source, DefWithBody, Either, HasSource, Name, Path, Resolver, Source,
}; };

View file

@ -1,5 +1,10 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{path::GenericArgs, type_ref::TypeRef};
use hir_expand::{
hygiene::Hygiene,
name::{self, AsName, Name},
};
use ra_arena::Arena; use ra_arena::Arena;
use ra_syntax::{ use ra_syntax::{
ast::{ ast::{
@ -12,11 +17,8 @@ use test_utils::tested_by;
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
name::{AsName, Name, SELF_PARAM},
path::GenericArgs,
ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy}, ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
type_ref::TypeRef, AstId, DefWithBody, Either, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, Resolver,
DefWithBody, Either, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path, Resolver,
Source, Source,
}; };
@ -78,7 +80,7 @@ where
let ptr = AstPtr::new(&self_param); let ptr = AstPtr::new(&self_param);
let param_pat = self.alloc_pat( let param_pat = self.alloc_pat(
Pat::Bind { Pat::Bind {
name: SELF_PARAM, name: name::SELF_PARAM,
mode: BindingAnnotation::Unannotated, mode: BindingAnnotation::Unannotated,
subpat: None, subpat: None,
}, },
@ -458,15 +460,14 @@ where
ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr), ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr), ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::MacroCall(e) => { ast::Expr::MacroCall(e) => {
let ast_id = self let ast_id = AstId::new(
.db self.current_file_id,
.ast_id_map(self.current_file_id) self.db.ast_id_map(self.current_file_id).ast_id(&e),
.ast_id(&e) );
.with_file_id(self.current_file_id);
if let Some(path) = e.path().and_then(|path| self.parse_path(path)) { if let Some(path) = e.path().and_then(|path| self.parse_path(path)) {
if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) { if let Some(def) = self.resolver.resolve_path_as_macro(self.db, &path) {
let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); let call_id = self.db.intern_macro(MacroCallLoc { def: def.id, ast_id });
let file_id = call_id.as_file(MacroFileKind::Expr); let file_id = call_id.as_file(MacroFileKind::Expr);
if let Some(node) = self.db.parse_or_expand(file_id) { if let Some(node) = self.db.parse_or_expand(file_id) {
if let Some(expr) = ast::Expr::cast(node) { if let Some(expr) = ast::Expr::cast(node) {
@ -596,7 +597,8 @@ where
} }
fn parse_path(&mut self, path: ast::Path) -> Option<Path> { fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
Path::from_src(Source { ast: path, file_id: self.current_file_id }, self.db) let hygiene = Hygiene::new(self.db, self.current_file_id);
Path::from_src(path, &hygiene)
} }
} }

View file

@ -2,6 +2,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::path::known;
use ra_syntax::ast; use ra_syntax::ast;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
@ -9,7 +10,6 @@ use crate::{
db::HirDatabase, db::HirDatabase,
diagnostics::{DiagnosticSink, MissingFields, MissingOkInTailExpr}, diagnostics::{DiagnosticSink, MissingFields, MissingOkInTailExpr},
expr::AstPtr, expr::AstPtr,
path::known,
ty::{ApplicationTy, InferenceResult, Ty, TypeCtor}, ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
Adt, Function, Name, Path, Adt, Function, Name, Path,
}; };

View file

@ -1,17 +1,12 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_db::{FileId, FilePosition}; use hir_expand::name::AsName;
use ra_syntax::{ use ra_syntax::ast::{self, AstNode, NameOwner};
algo::find_node_at_offset,
ast::{self, AstNode, NameOwner},
SyntaxNode,
};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
ids::{AstItemDef, LocationCtx}, ids::{AstItemDef, LocationCtx},
name::AsName, AstId, Const, Crate, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock, Module,
Const, Crate, Enum, EnumVariant, FieldSource, Function, HasSource, ImplBlock, Module,
ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Union, VariantDef,
}; };
@ -129,41 +124,6 @@ impl FromSource for StructField {
} }
} }
// FIXME: simplify it
impl ModuleSource {
pub fn from_position(
db: &(impl DefDatabase + AstDatabase),
position: FilePosition,
) -> ModuleSource {
let parse = db.parse(position.file_id);
match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) {
Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
_ => {
let source_file = parse.tree();
ModuleSource::SourceFile(source_file)
}
}
}
pub fn from_child_node(
db: &(impl DefDatabase + AstDatabase),
file_id: FileId,
child: &SyntaxNode,
) -> ModuleSource {
if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
ModuleSource::Module(m)
} else {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
pub fn from_file_id(db: &(impl DefDatabase + AstDatabase), file_id: FileId) -> ModuleSource {
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
}
impl Module { impl Module {
pub fn from_declaration(db: &impl HirDatabase, src: Source<ast::Module>) -> Option<Self> { pub fn from_declaration(db: &impl HirDatabase, src: Source<ast::Module>) -> Option<Self> {
let src_parent = Source { let src_parent = Source {
@ -183,7 +143,7 @@ impl Module {
ModuleSource::Module(ref module) => { ModuleSource::Module(ref module) => {
assert!(!module.has_semi()); assert!(!module.has_semi());
let ast_id_map = db.ast_id_map(src.file_id); let ast_id_map = db.ast_id_map(src.file_id);
let item_id = ast_id_map.ast_id(module).with_file_id(src.file_id); let item_id = AstId::new(src.file_id, ast_id_map.ast_id(module));
Some(item_id) Some(item_id)
} }
ModuleSource::SourceFile(_) => None, ModuleSource::SourceFile(_) => None,
@ -195,7 +155,7 @@ impl Module {
.find_map(|krate| { .find_map(|krate| {
let def_map = db.crate_def_map(krate); let def_map = db.crate_def_map(krate);
let module_id = def_map.find_module_by_source(src.file_id, decl_id)?; let module_id = def_map.find_module_by_source(src.file_id, decl_id)?;
Some(Module { krate, module_id }) Some(Module::new(krate, module_id))
}) })
} }
} }
@ -208,6 +168,6 @@ where
let module_src = let module_src =
crate::ModuleSource::from_child_node(db, src.file_id.original_file(db), &src.ast.syntax()); crate::ModuleSource::from_child_node(db, src.file_id.original_file(db), &src.ast.syntax());
let module = Module::from_definition(db, Source { file_id: src.file_id, ast: module_src })?; let module = Module::from_definition(db, Source { file_id: src.file_id, ast: module_src })?;
let ctx = LocationCtx::new(db, module, src.file_id); let ctx = LocationCtx::new(db, module.id, src.file_id);
Some(DEF::from_ast(ctx, &src.ast)) Some(DEF::from_ast(ctx, &src.ast))
} }

View file

@ -5,15 +5,17 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::{
path::Path,
type_ref::{TypeBound, TypeRef},
};
use hir_expand::name::{self, AsName};
use ra_syntax::ast::{self, DefaultTypeParamOwner, NameOwner, TypeBoundsOwner, TypeParamsOwner}; use ra_syntax::ast::{self, DefaultTypeParamOwner, NameOwner, TypeBoundsOwner, TypeParamsOwner};
use crate::{ use crate::{
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
name::SELF_TYPE, Adt, Const, Container, Enum, EnumVariant, Function, HasSource, ImplBlock, Name, Struct, Trait,
path::Path, TypeAlias, Union,
type_ref::{TypeBound, TypeRef},
Adt, AsName, Const, Container, Enum, EnumVariant, Function, HasSource, ImplBlock, Name, Struct,
Trait, TypeAlias, Union,
}; };
/// Data about a generic parameter (to a function, struct, impl, ...). /// Data about a generic parameter (to a function, struct, impl, ...).
@ -94,11 +96,15 @@ impl GenericParams {
GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).ast, start), GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).ast, start),
GenericDef::Trait(it) => { GenericDef::Trait(it) => {
// traits get the Self type as an implicit first type parameter // traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); generics.params.push(GenericParam {
idx: start,
name: name::SELF_TYPE,
default: None,
});
generics.fill(&it.source(db).ast, start + 1); generics.fill(&it.source(db).ast, start + 1);
// add super traits as bounds on Self // add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
let self_param = TypeRef::Path(SELF_TYPE.into()); let self_param = TypeRef::Path(name::SELF_TYPE.into());
generics.fill_bounds(&it.source(db).ast, self_param); generics.fill_bounds(&it.source(db).ast, self_param);
} }
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start), GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),

View file

@ -1,168 +1,17 @@
//! FIXME: write short doc here //! hir makes heavy use of ids: integer (u32) handlers to various things. You
//! can think of id as a pointer (but without a lifetime) or a file descriptor
//! (but for hir objects).
//!
//! This module defines a bunch of ids we are using. The most important ones are
//! probably `HirFileId` and `DefId`.
use std::{ use ra_db::salsa;
hash::{Hash, Hasher},
sync::Arc, pub use hir_def::{
AstItemDef, ConstId, EnumId, FunctionId, ItemLoc, LocationCtx, StaticId, StructId, TraitId,
TypeAliasId,
}; };
pub use hir_expand::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, MacroFileKind};
use mbe::MacroRules;
use ra_db::{salsa, FileId};
use ra_prof::profile;
use ra_syntax::{ast, AstNode, Parse, SyntaxNode};
use crate::{
db::{AstDatabase, DefDatabase, InternDatabase},
AstId, Crate, FileAstId, Module, Source,
};
/// hir makes heavy use of ids: integer (u32) handlers to various things. You
/// can think of id as a pointer (but without a lifetime) or a file descriptor
/// (but for hir objects).
///
/// This module defines a bunch of ids we are using. The most important ones are
/// probably `HirFileId` and `DefId`.
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interner).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(HirFileIdRepr);
impl HirFileId {
/// For macro-expansion files, returns the file original source file the
/// expansion originated from.
pub fn original_file(self, db: &impl InternDatabase) -> FileId {
match self.0 {
HirFileIdRepr::File(file_id) => file_id,
HirFileIdRepr::Macro(macro_file) => {
let loc = macro_file.macro_call_id.loc(db);
loc.ast_id.file_id().original_file(db)
}
}
}
/// Get the crate which the macro lives in, if it is a macro file.
pub(crate) fn macro_crate(self, db: &impl AstDatabase) -> Option<Crate> {
match self.0 {
HirFileIdRepr::File(_) => None,
HirFileIdRepr::Macro(macro_file) => {
let loc = macro_file.macro_call_id.loc(db);
Some(loc.def.krate)
}
}
}
pub(crate) fn parse_or_expand_query(
db: &impl AstDatabase,
file_id: HirFileId,
) -> Option<SyntaxNode> {
match file_id.0 {
HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::Macro(macro_file) => {
db.parse_macro(macro_file).map(|it| it.syntax_node())
}
}
}
pub(crate) fn parse_macro_query(
db: &impl AstDatabase,
macro_file: MacroFile,
) -> Option<Parse<SyntaxNode>> {
let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let tt = db
.macro_expand(macro_call_id)
.map_err(|err| {
// Note:
// The final goal we would like to make all parse_macro success,
// such that the following log will not call anyway.
log::warn!("fail on macro_parse: (reason: {})", err,);
})
.ok()?;
match macro_file.macro_file_kind {
MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum HirFileIdRepr {
File(FileId),
Macro(MacroFile),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
macro_call_id: MacroCallId,
macro_file_kind: MacroFileKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) enum MacroFileKind {
Items,
Expr,
}
impl From<FileId> for HirFileId {
fn from(file_id: FileId) -> HirFileId {
HirFileId(HirFileIdRepr::File(file_id))
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {
pub(crate) ast_id: AstId<ast::MacroCall>,
pub(crate) krate: Crate,
}
pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
let macro_call = id.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
let rules = MacroRules::parse(&tt).ok().or_else(|| {
log::warn!("fail on macro_def parse: {:#?}", tt);
None
})?;
Some(Arc::new(rules))
}
pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option<Arc<tt::Subtree>> {
let loc = id.loc(db);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new(tt))
}
pub(crate) fn macro_expand_query(
db: &impl AstDatabase,
id: MacroCallId,
) -> Result<Arc<tt::Subtree>, String> {
let loc = id.loc(db);
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
// Set a hard limit for the expanded tt
let count = tt.count();
if count > 65536 {
return Err(format!("Total tokens count exceed limit : count = {}", count));
}
Ok(Arc::new(tt))
}
macro_rules! impl_intern_key { macro_rules! impl_intern_key {
($name:ident) => { ($name:ident) => {
@ -177,192 +26,6 @@ macro_rules! impl_intern_key {
}; };
} }
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroCallId(salsa::InternId);
impl_intern_key!(MacroCallId);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub(crate) def: MacroDefId,
pub(crate) ast_id: AstId<ast::MacroCall>,
}
impl MacroCallId {
pub(crate) fn loc(self, db: &impl InternDatabase) -> MacroCallLoc {
db.lookup_intern_macro(self)
}
pub(crate) fn as_file(self, kind: MacroFileKind) -> HirFileId {
let macro_file = MacroFile { macro_call_id: self, macro_file_kind: kind };
HirFileId(HirFileIdRepr::Macro(macro_file))
}
}
impl MacroCallLoc {
pub(crate) fn id(self, db: &impl InternDatabase) -> MacroCallId {
db.intern_macro(self)
}
}
#[derive(Debug)]
pub struct ItemLoc<N: AstNode> {
pub(crate) module: Module,
ast_id: AstId<N>,
}
impl<N: AstNode> PartialEq for ItemLoc<N> {
fn eq(&self, other: &Self) -> bool {
self.module == other.module && self.ast_id == other.ast_id
}
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
self.module.hash(hasher);
self.ast_id.hash(hasher);
}
}
impl<N: AstNode> Clone for ItemLoc<N> {
fn clone(&self) -> ItemLoc<N> {
ItemLoc { module: self.module, ast_id: self.ast_id }
}
}
#[derive(Clone, Copy)]
pub(crate) struct LocationCtx<DB> {
db: DB,
module: Module,
file_id: HirFileId,
}
impl<'a, DB: DefDatabase> LocationCtx<&'a DB> {
pub(crate) fn new(db: &'a DB, module: Module, file_id: HirFileId) -> LocationCtx<&'a DB> {
LocationCtx { db, module, file_id }
}
}
impl<'a, DB: DefDatabase + AstDatabase> LocationCtx<&'a DB> {
pub(crate) fn to_def<N, DEF>(self, ast: &N) -> DEF
where
N: AstNode,
DEF: AstItemDef<N>,
{
DEF::from_ast(self, ast)
}
}
pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
fn intern(db: &impl DefDatabase, loc: ItemLoc<N>) -> Self;
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<N>;
fn from_ast(ctx: LocationCtx<&(impl AstDatabase + DefDatabase)>, ast: &N) -> Self {
let items = ctx.db.ast_id_map(ctx.file_id);
let item_id = items.ast_id(ast);
Self::from_ast_id(ctx, item_id)
}
fn from_ast_id(ctx: LocationCtx<&impl DefDatabase>, ast_id: FileAstId<N>) -> Self {
let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
Self::intern(ctx.db, loc)
}
fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> {
let loc = self.lookup_intern(db);
let ast = loc.ast_id.to_node(db);
Source { file_id: loc.ast_id.file_id(), ast }
}
fn module(self, db: &impl DefDatabase) -> Module {
let loc = self.lookup_intern(db);
loc.module
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(salsa::InternId);
impl_intern_key!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::FnDef>) -> Self {
db.intern_function(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::FnDef> {
db.lookup_intern_function(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(salsa::InternId);
impl_intern_key!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_struct(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_struct(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(salsa::InternId);
impl_intern_key!(EnumId);
impl AstItemDef<ast::EnumDef> for EnumId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::EnumDef>) -> Self {
db.intern_enum(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::EnumDef> {
db.lookup_intern_enum(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(salsa::InternId);
impl_intern_key!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::ConstDef>) -> Self {
db.intern_const(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::ConstDef> {
db.lookup_intern_const(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(salsa::InternId);
impl_intern_key!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::StaticDef>) -> Self {
db.intern_static(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::StaticDef> {
db.lookup_intern_static(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(salsa::InternId);
impl_intern_key!(TraitId);
impl AstItemDef<ast::TraitDef> for TraitId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::TraitDef>) -> Self {
db.intern_trait(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::TraitDef> {
db.lookup_intern_trait(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(salsa::InternId);
impl_intern_key!(TypeAliasId);
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
fn intern(db: &impl DefDatabase, loc: ItemLoc<ast::TypeAliasDef>) -> Self {
db.intern_type_alias(loc)
}
fn lookup_intern(self, db: &impl DefDatabase) -> ItemLoc<ast::TypeAliasDef> {
db.lookup_intern_type_alias(self)
}
}
/// This exists just for Chalk, because Chalk just has a single `StructId` where /// This exists just for Chalk, because Chalk just has a single `StructId` where
/// we have different kinds of ADTs, primitive types and special type /// we have different kinds of ADTs, primitive types and special type
/// constructors like tuples and function pointers. /// constructors like tuples and function pointers.

View file

@ -3,6 +3,8 @@
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use std::sync::Arc; use std::sync::Arc;
use hir_def::{attr::Attr, type_ref::TypeRef};
use hir_expand::hygiene::Hygiene;
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_syntax::{ use ra_syntax::{
@ -11,7 +13,6 @@ use ra_syntax::{
}; };
use crate::{ use crate::{
attr::Attr,
code_model::{Module, ModuleSource}, code_model::{Module, ModuleSource},
db::{AstDatabase, DefDatabase, HirDatabase}, db::{AstDatabase, DefDatabase, HirDatabase},
generics::HasGenericParams, generics::HasGenericParams,
@ -19,8 +20,7 @@ use crate::{
ids::MacroCallLoc, ids::MacroCallLoc,
resolve::Resolver, resolve::Resolver,
ty::Ty, ty::Ty,
type_ref::TypeRef, AssocItem, AstId, Const, Function, HasSource, HirFileId, MacroFileKind, Path, Source, TraitRef,
AssocItem, Const, Function, HasSource, HirFileId, MacroFileKind, Path, Source, TraitRef,
TypeAlias, TypeAlias,
}; };
@ -129,7 +129,7 @@ impl ImplData {
) -> Self { ) -> Self {
let target_trait = node.target_trait().map(TypeRef::from_ast); let target_trait = node.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(node.target_type()); let target_type = TypeRef::from_ast_opt(node.target_type());
let ctx = LocationCtx::new(db, module, file_id); let ctx = LocationCtx::new(db, module.id, file_id);
let negative = node.is_negative(); let negative = node.is_negative();
let items = if let Some(item_list) = node.item_list() { let items = if let Some(item_list) = node.item_list() {
item_list item_list
@ -182,7 +182,7 @@ impl ModuleImplBlocks {
) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>) { ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>) {
let mut source_map = ImplSourceMap::default(); let mut source_map = ImplSourceMap::default();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
let cfg_options = crate_graph.cfg_options(module.krate.crate_id()); let cfg_options = crate_graph.cfg_options(module.id.krate);
let result = ModuleImplBlocks::collect(db, cfg_options, module, &mut source_map); let result = ModuleImplBlocks::collect(db, cfg_options, module, &mut source_map);
(Arc::new(result), Arc::new(source_map)) (Arc::new(result), Arc::new(source_map))
@ -228,10 +228,11 @@ impl ModuleImplBlocks {
owner: &dyn ast::ModuleItemOwner, owner: &dyn ast::ModuleItemOwner,
file_id: HirFileId, file_id: HirFileId,
) { ) {
let hygiene = Hygiene::new(db, file_id);
for item in owner.items_with_macros() { for item in owner.items_with_macros() {
match item { match item {
ast::ItemOrMacro::Item(ast::ModuleItem::ImplBlock(impl_block_ast)) => { ast::ItemOrMacro::Item(ast::ModuleItem::ImplBlock(impl_block_ast)) => {
let attrs = Attr::from_attrs_owner(file_id, &impl_block_ast, db); let attrs = Attr::from_attrs_owner(&impl_block_ast, &hygiene);
if attrs.map_or(false, |attrs| { if attrs.map_or(false, |attrs| {
attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false)) attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false))
}) { }) {
@ -248,7 +249,7 @@ impl ModuleImplBlocks {
} }
ast::ItemOrMacro::Item(_) => (), ast::ItemOrMacro::Item(_) => (),
ast::ItemOrMacro::Macro(macro_call) => { ast::ItemOrMacro::Macro(macro_call) => {
let attrs = Attr::from_attrs_owner(file_id, &macro_call, db); let attrs = Attr::from_attrs_owner(&macro_call, &hygiene);
if attrs.map_or(false, |attrs| { if attrs.map_or(false, |attrs| {
attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false)) attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false))
}) { }) {
@ -256,14 +257,13 @@ impl ModuleImplBlocks {
} }
//FIXME: we should really cut down on the boilerplate required to process a macro //FIXME: we should really cut down on the boilerplate required to process a macro
let ast_id = db.ast_id_map(file_id).ast_id(&macro_call).with_file_id(file_id); let ast_id = AstId::new(file_id, db.ast_id_map(file_id).ast_id(&macro_call));
if let Some(path) = macro_call if let Some(path) =
.path() macro_call.path().and_then(|path| Path::from_src(path, &hygiene))
.and_then(|path| Path::from_src(Source { ast: path, file_id }, db))
{ {
if let Some(def) = self.module.resolver(db).resolve_path_as_macro(db, &path) if let Some(def) = self.module.resolver(db).resolve_path_as_macro(db, &path)
{ {
let call_id = MacroCallLoc { def: def.id, ast_id }.id(db); let call_id = db.intern_macro(MacroCallLoc { def: def.id, ast_id });
let file_id = call_id.as_file(MacroFileKind::Items); let file_id = call_id.as_file(MacroFileKind::Items);
if let Some(item_list) = if let Some(item_list) =
db.parse_or_expand(file_id).and_then(ast::MacroItems::cast) db.parse_or_expand(file_id).and_then(ast::MacroItems::cast)

View file

@ -22,14 +22,14 @@ pub enum LangItemTarget {
impl LangItemTarget { impl LangItemTarget {
pub(crate) fn krate(&self, db: &impl HirDatabase) -> Option<Crate> { pub(crate) fn krate(&self, db: &impl HirDatabase) -> Option<Crate> {
match self { Some(match self {
LangItemTarget::Enum(e) => e.module(db).krate(db), LangItemTarget::Enum(e) => e.module(db).krate(),
LangItemTarget::Function(f) => f.module(db).krate(db), LangItemTarget::Function(f) => f.module(db).krate(),
LangItemTarget::ImplBlock(i) => i.module().krate(db), LangItemTarget::ImplBlock(i) => i.module().krate(),
LangItemTarget::Static(s) => s.module(db).krate(db), LangItemTarget::Static(s) => s.module(db).krate(),
LangItemTarget::Struct(s) => s.module(db).krate(db), LangItemTarget::Struct(s) => s.module(db).krate(),
LangItemTarget::Trait(t) => t.module(db).krate(db), LangItemTarget::Trait(t) => t.module(db).krate(),
} })
} }
} }

View file

@ -26,25 +26,19 @@ macro_rules! impl_froms {
} }
} }
mod either;
pub mod debug; pub mod debug;
pub mod db; pub mod db;
#[macro_use] #[macro_use]
pub mod mock; pub mod mock;
mod path;
pub mod source_binder; pub mod source_binder;
mod source_id;
mod ids; mod ids;
mod name;
mod nameres; mod nameres;
mod adt; mod adt;
mod traits; mod traits;
mod type_alias; mod type_alias;
mod type_ref;
mod ty; mod ty;
mod attr;
mod impl_block; mod impl_block;
mod expr; mod expr;
mod lang_item; mod lang_item;
@ -60,37 +54,35 @@ pub mod from_source;
#[cfg(test)] #[cfg(test)]
mod marks; mod marks;
use crate::{ use hir_expand::AstId;
ids::MacroFileKind,
name::AsName,
resolve::Resolver,
source_id::{AstId, FileAstId},
};
pub use self::{ use crate::{ids::MacroFileKind, resolve::Resolver};
pub use crate::{
adt::VariantDef, adt::VariantDef,
either::Either, code_model::{
docs::{DocDef, Docs, Documentation},
src::{HasBodySource, HasSource, Source},
Adt, AssocItem, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, Enum,
EnumVariant, FieldSource, FnData, Function, HasBody, MacroDef, Module, ModuleDef,
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
},
expr::ExprScopes, expr::ExprScopes,
from_source::FromSource, from_source::FromSource,
generics::{GenericDef, GenericParam, GenericParams, HasGenericParams}, generics::{GenericDef, GenericParam, GenericParams, HasGenericParams},
ids::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile}, ids::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile},
impl_block::ImplBlock, impl_block::ImplBlock,
name::Name,
nameres::{ImportId, Namespace, PerNs}, nameres::{ImportId, Namespace, PerNs},
path::{Path, PathKind},
resolve::ScopeDef, resolve::ScopeDef,
source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
source_id::{AstIdMap, ErasedFileAstId},
ty::{ ty::{
display::HirDisplay, ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk, display::HirDisplay, ApplicationTy, CallableDef, Substs, TraitRef, Ty, TypeCtor, TypeWalk,
}, },
type_ref::Mutability,
}; };
pub use self::code_model::{ pub use hir_def::{
docs::{DocDef, Docs, Documentation}, builtin_type::BuiltinType,
src::{HasBodySource, HasSource, Source}, path::{Path, PathKind},
Adt, AssocItem, BuiltinType, Const, ConstData, Container, Crate, CrateDependency, DefWithBody, type_ref::Mutability,
Enum, EnumVariant, FieldSource, FnData, Function, HasBody, MacroDef, Module, ModuleDef,
ModuleSource, Static, Struct, StructField, Trait, TypeAlias, Union,
}; };
pub use hir_expand::{either::Either, name::Name};

View file

@ -2,6 +2,7 @@
test_utils::marks!( test_utils::marks!(
bogus_paths bogus_paths
// FIXME: restore this mark once hir is split
name_res_works_for_broken_modules name_res_works_for_broken_modules
can_import_enum_variant can_import_enum_variant
type_var_cycles_resolve_completely type_var_cycles_resolve_completely

View file

@ -22,6 +22,7 @@ pub const WORKSPACE: SourceRootId = SourceRootId(0);
db::InternDatabaseStorage, db::InternDatabaseStorage,
db::AstDatabaseStorage, db::AstDatabaseStorage,
db::DefDatabaseStorage, db::DefDatabaseStorage,
db::DefDatabase2Storage,
db::HirDatabaseStorage db::HirDatabaseStorage
)] )]
#[derive(Debug)] #[derive(Debug)]

View file

@ -48,16 +48,15 @@
//! on the result //! on the result
mod per_ns; mod per_ns;
mod raw;
mod collector; mod collector;
mod mod_resolution;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;
use std::sync::Arc; use std::sync::Arc;
use hir_def::{builtin_type::BuiltinType, CrateModuleId};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use ra_arena::{impl_arena_id, Arena, RawId}; use ra_arena::Arena;
use ra_db::{Edition, FileId}; use ra_db::{Edition, FileId};
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::ast; use ra_syntax::ast;
@ -69,16 +68,12 @@ use crate::{
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
ids::MacroDefId, ids::MacroDefId,
nameres::diagnostics::DefDiagnostic, nameres::diagnostics::DefDiagnostic,
Adt, AstId, BuiltinType, Crate, HirFileId, MacroDef, Module, ModuleDef, Name, Path, PathKind, Adt, AstId, Crate, HirFileId, MacroDef, Module, ModuleDef, Name, Path, PathKind, Trait,
Trait,
}; };
pub(crate) use self::raw::{ImportSourceMap, RawItems}; pub use self::per_ns::{Namespace, PerNs};
pub use self::{ pub use hir_def::nameres::raw::ImportId;
per_ns::{Namespace, PerNs},
raw::ImportId,
};
/// Contains all top-level defs from a macro-expanded crate /// Contains all top-level defs from a macro-expanded crate
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
@ -115,13 +110,8 @@ impl std::ops::Index<CrateModuleId> for CrateDefMap {
} }
} }
/// An ID of a module, **local** to a specific crate
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct CrateModuleId(RawId);
impl_arena_id!(CrateModuleId);
#[derive(Default, Debug, PartialEq, Eq)] #[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct ModuleData { pub struct ModuleData {
pub(crate) parent: Option<CrateModuleId>, pub(crate) parent: Option<CrateModuleId>,
pub(crate) children: FxHashMap<Name, CrateModuleId>, pub(crate) children: FxHashMap<Name, CrateModuleId>,
pub(crate) scope: ModuleScope, pub(crate) scope: ModuleScope,
@ -332,10 +322,11 @@ impl CrateDefMap {
) -> ResolvePathResult { ) -> ResolvePathResult {
let mut segments = path.segments.iter().enumerate(); let mut segments = path.segments.iter().enumerate();
let mut curr_per_ns: PerNs = match path.kind { let mut curr_per_ns: PerNs = match path.kind {
PathKind::DollarCrate(krate) => { PathKind::DollarCrate(crate_id) => {
let krate = Crate { crate_id };
if krate == self.krate { if krate == self.krate {
tested_by!(macro_dollar_crate_self); tested_by!(macro_dollar_crate_self);
PerNs::types(Module { krate: self.krate, module_id: self.root }.into()) PerNs::types(Module::new(self.krate, self.root).into())
} else { } else {
match krate.root_module(db) { match krate.root_module(db) {
Some(module) => { Some(module) => {
@ -346,12 +337,8 @@ impl CrateDefMap {
} }
} }
} }
PathKind::Crate => { PathKind::Crate => PerNs::types(Module::new(self.krate, self.root).into()),
PerNs::types(Module { krate: self.krate, module_id: self.root }.into()) PathKind::Self_ => PerNs::types(Module::new(self.krate, original_module).into()),
}
PathKind::Self_ => {
PerNs::types(Module { krate: self.krate, module_id: original_module }.into())
}
// plain import or absolute path in 2015: crate-relative with // plain import or absolute path in 2015: crate-relative with
// fallback to extern prelude (with the simplification in // fallback to extern prelude (with the simplification in
// rust-lang/rust#57745) // rust-lang/rust#57745)
@ -377,7 +364,7 @@ impl CrateDefMap {
} }
PathKind::Super => { PathKind::Super => {
if let Some(p) = self.modules[original_module].parent { if let Some(p) = self.modules[original_module].parent {
PerNs::types(Module { krate: self.krate, module_id: p }.into()) PerNs::types(Module::new(self.krate, p).into())
} else { } else {
log::debug!("super path in root module"); log::debug!("super path in root module");
return ResolvePathResult::empty(ReachedFixedPoint::Yes); return ResolvePathResult::empty(ReachedFixedPoint::Yes);
@ -419,12 +406,12 @@ impl CrateDefMap {
curr_per_ns = match curr { curr_per_ns = match curr {
ModuleDef::Module(module) => { ModuleDef::Module(module) => {
if module.krate != self.krate { if module.krate() != self.krate {
let path = let path =
Path { segments: path.segments[i..].to_vec(), kind: PathKind::Self_ }; Path { segments: path.segments[i..].to_vec(), kind: PathKind::Self_ };
log::debug!("resolving {:?} in other crate", path); log::debug!("resolving {:?} in other crate", path);
let defp_map = db.crate_def_map(module.krate); let defp_map = db.crate_def_map(module.krate());
let (def, s) = defp_map.resolve_path(db, module.module_id, &path); let (def, s) = defp_map.resolve_path(db, module.id.module_id, &path);
return ResolvePathResult::with( return ResolvePathResult::with(
def, def,
ReachedFixedPoint::Yes, ReachedFixedPoint::Yes,
@ -433,7 +420,7 @@ impl CrateDefMap {
} }
// Since it is a qualified path here, it should not contains legacy macros // Since it is a qualified path here, it should not contains legacy macros
match self[module.module_id].scope.get(&segment.name) { match self[module.id.module_id].scope.get(&segment.name) {
Some(res) => res.def, Some(res) => res.def,
_ => { _ => {
log::debug!("path segment {:?} not found", segment.name); log::debug!("path segment {:?} not found", segment.name);
@ -511,14 +498,14 @@ impl CrateDefMap {
fn resolve_in_prelude(&self, db: &impl DefDatabase, name: &Name) -> PerNs { fn resolve_in_prelude(&self, db: &impl DefDatabase, name: &Name) -> PerNs {
if let Some(prelude) = self.prelude { if let Some(prelude) = self.prelude {
let keep; let keep;
let def_map = if prelude.krate == self.krate { let def_map = if prelude.krate() == self.krate {
self self
} else { } else {
// Extend lifetime // Extend lifetime
keep = db.crate_def_map(prelude.krate); keep = db.crate_def_map(prelude.krate());
&keep &keep
}; };
def_map[prelude.module_id].scope.get(name).map_or_else(PerNs::none, |res| res.def) def_map[prelude.id.module_id].scope.get(name).map_or_else(PerNs::none, |res| res.def)
} else { } else {
PerNs::none() PerNs::none()
} }

View file

@ -1,5 +1,10 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir_def::{
attr::Attr,
nameres::{mod_resolution::ModDir, raw},
};
use hir_expand::name;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_db::FileId; use ra_db::FileId;
use ra_syntax::{ast, SmolStr}; use ra_syntax::{ast, SmolStr};
@ -7,13 +12,11 @@ use rustc_hash::FxHashMap;
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
attr::Attr,
db::DefDatabase, db::DefDatabase,
ids::{AstItemDef, LocationCtx, MacroCallId, MacroCallLoc, MacroDefId, MacroFileKind}, ids::{AstItemDef, LocationCtx, MacroCallId, MacroCallLoc, MacroDefId, MacroFileKind},
name::MACRO_RULES,
nameres::{ nameres::{
diagnostics::DefDiagnostic, mod_resolution::ModDir, raw, Crate, CrateDefMap, CrateModuleId, diagnostics::DefDiagnostic, Crate, CrateDefMap, CrateModuleId, ModuleData, ModuleDef,
ModuleData, ModuleDef, PerNs, ReachedFixedPoint, Resolution, ResolveMode, PerNs, ReachedFixedPoint, Resolution, ResolveMode,
}, },
Adt, AstId, Const, Enum, Function, HirFileId, MacroDef, Module, Name, Path, PathKind, Static, Adt, AstId, Const, Enum, Function, HirFileId, MacroDef, Module, Name, Path, PathKind, Static,
Struct, Trait, TypeAlias, Union, Struct, Trait, TypeAlias, Union,
@ -212,7 +215,7 @@ where
if let Some(ModuleDef::Module(m)) = res.take_types() { if let Some(ModuleDef::Module(m)) = res.take_types() {
tested_by!(macro_rules_from_other_crates_are_visible_with_macro_use); tested_by!(macro_rules_from_other_crates_are_visible_with_macro_use);
self.import_all_macros_exported(current_module_id, m.krate); self.import_all_macros_exported(current_module_id, m.krate());
} }
} }
@ -289,11 +292,11 @@ where
if import.is_prelude { if import.is_prelude {
tested_by!(std_prelude); tested_by!(std_prelude);
self.def_map.prelude = Some(m); self.def_map.prelude = Some(m);
} else if m.krate != self.def_map.krate { } else if m.krate() != self.def_map.krate {
tested_by!(glob_across_crates); tested_by!(glob_across_crates);
// glob import from other crate => we can just import everything once // glob import from other crate => we can just import everything once
let item_map = self.db.crate_def_map(m.krate); let item_map = self.db.crate_def_map(m.krate());
let scope = &item_map[m.module_id].scope; let scope = &item_map[m.id.module_id].scope;
// Module scoped macros is included // Module scoped macros is included
let items = scope let items = scope
@ -307,7 +310,7 @@ where
// glob import from same crate => we do an initial // glob import from same crate => we do an initial
// import, and then need to propagate any further // import, and then need to propagate any further
// additions // additions
let scope = &self.def_map[m.module_id].scope; let scope = &self.def_map[m.id.module_id].scope;
// Module scoped macros is included // Module scoped macros is included
let items = scope let items = scope
@ -319,7 +322,7 @@ where
self.update(module_id, Some(import_id), &items); self.update(module_id, Some(import_id), &items);
// record the glob import in case we add further items // record the glob import in case we add further items
self.glob_imports self.glob_imports
.entry(m.module_id) .entry(m.id.module_id)
.or_default() .or_default()
.push((module_id, import_id)); .push((module_id, import_id));
} }
@ -448,7 +451,7 @@ where
); );
if let Some(def) = resolved_res.resolved_def.get_macros() { if let Some(def) = resolved_res.resolved_def.get_macros() {
let call_id = MacroCallLoc { def: def.id, ast_id: *ast_id }.id(self.db); let call_id = self.db.intern_macro(MacroCallLoc { def: def.id, ast_id: *ast_id });
resolved.push((*module_id, call_id, def.id)); resolved.push((*module_id, call_id, def.id));
res = ReachedFixedPoint::No; res = ReachedFixedPoint::No;
return false; return false;
@ -523,9 +526,10 @@ where
// Prelude module is always considered to be `#[macro_use]`. // Prelude module is always considered to be `#[macro_use]`.
if let Some(prelude_module) = self.def_collector.def_map.prelude { if let Some(prelude_module) = self.def_collector.def_map.prelude {
if prelude_module.krate != self.def_collector.def_map.krate { if prelude_module.krate() != self.def_collector.def_map.krate {
tested_by!(prelude_is_macro_use); tested_by!(prelude_is_macro_use);
self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate); self.def_collector
.import_all_macros_exported(self.module_id, prelude_module.krate());
} }
} }
@ -567,7 +571,7 @@ where
// inline module, just recurse // inline module, just recurse
raw::ModuleData::Definition { name, items, ast_id } => { raw::ModuleData::Definition { name, items, ast_id } => {
let module_id = let module_id =
self.push_child_module(name.clone(), ast_id.with_file_id(self.file_id), None); self.push_child_module(name.clone(), AstId::new(self.file_id, *ast_id), None);
ModCollector { ModCollector {
def_collector: &mut *self.def_collector, def_collector: &mut *self.def_collector,
@ -583,7 +587,7 @@ where
} }
// out of line module, resolve, parse and recurse // out of line module, resolve, parse and recurse
raw::ModuleData::Declaration { name, ast_id } => { raw::ModuleData::Declaration { name, ast_id } => {
let ast_id = ast_id.with_file_id(self.file_id); let ast_id = AstId::new(self.file_id, *ast_id);
match self.mod_dir.resolve_declaration( match self.mod_dir.resolve_declaration(
self.def_collector.db, self.def_collector.db,
self.file_id, self.file_id,
@ -631,9 +635,7 @@ where
modules[res].scope.legacy_macros = modules[self.module_id].scope.legacy_macros.clone(); modules[res].scope.legacy_macros = modules[self.module_id].scope.legacy_macros.clone();
modules[self.module_id].children.insert(name.clone(), res); modules[self.module_id].children.insert(name.clone(), res);
let resolution = Resolution { let resolution = Resolution {
def: PerNs::types( def: PerNs::types(Module::new(self.def_collector.def_map.krate, res).into()),
Module { krate: self.def_collector.def_map.krate, module_id: res }.into(),
),
import: None, import: None,
}; };
self.def_collector.update(self.module_id, None, &[(name, resolution)]); self.def_collector.update(self.module_id, None, &[(name, resolution)]);
@ -641,8 +643,8 @@ where
} }
fn define_def(&mut self, def: &raw::DefData) { fn define_def(&mut self, def: &raw::DefData) {
let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id }; let module = Module::new(self.def_collector.def_map.krate, self.module_id);
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id); let ctx = LocationCtx::new(self.def_collector.db, module.id, self.file_id);
macro_rules! def { macro_rules! def {
($kind:ident, $ast_id:ident) => { ($kind:ident, $ast_id:ident) => {
@ -671,28 +673,26 @@ where
} }
fn collect_macro(&mut self, mac: &raw::MacroData) { fn collect_macro(&mut self, mac: &raw::MacroData) {
let ast_id = AstId::new(self.file_id, mac.ast_id);
// Case 1: macro rules, define a macro in crate-global mutable scope // Case 1: macro rules, define a macro in crate-global mutable scope
if is_macro_rules(&mac.path) { if is_macro_rules(&mac.path) {
if let Some(name) = &mac.name { if let Some(name) = &mac.name {
let macro_id = MacroDefId { let macro_id =
ast_id: mac.ast_id.with_file_id(self.file_id), MacroDefId { ast_id, krate: self.def_collector.def_map.krate.crate_id };
krate: self.def_collector.def_map.krate,
};
let macro_ = MacroDef { id: macro_id }; let macro_ = MacroDef { id: macro_id };
self.def_collector.define_macro(self.module_id, name.clone(), macro_, mac.export); self.def_collector.define_macro(self.module_id, name.clone(), macro_, mac.export);
} }
return; return;
} }
let ast_id = mac.ast_id.with_file_id(self.file_id);
// Case 2: try to resolve in legacy scope and expand macro_rules, triggering // Case 2: try to resolve in legacy scope and expand macro_rules, triggering
// recursive item collection. // recursive item collection.
if let Some(macro_def) = mac.path.as_ident().and_then(|name| { if let Some(macro_def) = mac.path.as_ident().and_then(|name| {
self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
}) { }) {
let def = macro_def.id; let def = macro_def.id;
let macro_call_id = MacroCallLoc { def, ast_id }.id(self.def_collector.db); let macro_call_id = self.def_collector.db.intern_macro(MacroCallLoc { def, ast_id });
self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, def); self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, def);
return; return;
@ -728,7 +728,7 @@ where
} }
fn is_macro_rules(path: &Path) -> bool { fn is_macro_rules(path: &Path) -> bool {
path.as_ident() == Some(&MACRO_RULES) path.as_ident() == Some(&name::MACRO_RULES)
} }
#[cfg(test)] #[cfg(test)]

View file

@ -2,7 +2,7 @@ use super::*;
#[test] #[test]
fn name_res_works_for_broken_modules() { fn name_res_works_for_broken_modules() {
covers!(name_res_works_for_broken_modules); // covers!(name_res_works_for_broken_modules);
let map = def_map( let map = def_map(
" "
//- /lib.rs //- /lib.rs

View file

@ -1,6 +1,12 @@
//! Name resolution. //! Name resolution.
use std::sync::Arc; use std::sync::Arc;
use hir_def::{
builtin_type::BuiltinType,
path::{Path, PathKind},
CrateModuleId,
};
use hir_expand::name::{self, Name};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use crate::{ use crate::{
@ -12,11 +18,8 @@ use crate::{
}, },
generics::GenericParams, generics::GenericParams,
impl_block::ImplBlock, impl_block::ImplBlock,
name::{Name, SELF_PARAM, SELF_TYPE}, nameres::{CrateDefMap, PerNs},
nameres::{CrateDefMap, CrateModuleId, PerNs}, Adt, Const, Enum, EnumVariant, Function, MacroDef, ModuleDef, Static, Struct, Trait, TypeAlias,
path::{Path, PathKind},
Adt, BuiltinType, Const, Enum, EnumVariant, Function, MacroDef, ModuleDef, Static, Struct,
Trait, TypeAlias,
}; };
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
@ -149,13 +152,13 @@ impl Resolver {
} }
} }
Scope::ImplBlockScope(impl_) => { Scope::ImplBlockScope(impl_) => {
if first_name == &SELF_TYPE { if first_name == &name::SELF_TYPE {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::SelfType(*impl_), idx)); return Some((TypeNs::SelfType(*impl_), idx));
} }
} }
Scope::AdtScope(adt) => { Scope::AdtScope(adt) => {
if first_name == &SELF_TYPE { if first_name == &name::SELF_TYPE {
let idx = if path.segments.len() == 1 { None } else { Some(1) }; let idx = if path.segments.len() == 1 { None } else { Some(1) };
return Some((TypeNs::AdtSelfType(*adt), idx)); return Some((TypeNs::AdtSelfType(*adt), idx));
} }
@ -204,7 +207,7 @@ impl Resolver {
return None; return None;
} }
let n_segments = path.segments.len(); let n_segments = path.segments.len();
let tmp = SELF_PARAM; let tmp = name::SELF_PARAM;
let first_name = if path.is_self() { &tmp } else { &path.segments.first()?.name }; let first_name = if path.is_self() { &tmp } else { &path.segments.first()?.name };
let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); let skip_to_mod = path.kind != PathKind::Plain && !path.is_self();
for scope in self.scopes.iter().rev() { for scope in self.scopes.iter().rev() {
@ -240,13 +243,13 @@ impl Resolver {
Scope::GenericParams(_) => continue, Scope::GenericParams(_) => continue,
Scope::ImplBlockScope(impl_) if n_segments > 1 => { Scope::ImplBlockScope(impl_) if n_segments > 1 => {
if first_name == &SELF_TYPE { if first_name == &name::SELF_TYPE {
let ty = TypeNs::SelfType(*impl_); let ty = TypeNs::SelfType(*impl_);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
} }
Scope::AdtScope(adt) if n_segments > 1 => { Scope::AdtScope(adt) if n_segments > 1 => {
if first_name == &SELF_TYPE { if first_name == &name::SELF_TYPE {
let ty = TypeNs::AdtSelfType(*adt); let ty = TypeNs::AdtSelfType(*adt);
return Some(ResolveValueResult::Partial(ty, 1)); return Some(ResolveValueResult::Partial(ty, 1));
} }
@ -330,8 +333,8 @@ impl Resolver {
for scope in &self.scopes { for scope in &self.scopes {
if let Scope::ModuleScope(m) = scope { if let Scope::ModuleScope(m) = scope {
if let Some(prelude) = m.crate_def_map.prelude() { if let Some(prelude) = m.crate_def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude.krate); let prelude_def_map = db.crate_def_map(prelude.krate());
traits.extend(prelude_def_map[prelude.module_id].scope.traits()); traits.extend(prelude_def_map[prelude.id.module_id].scope.traits());
} }
traits.extend(m.crate_def_map[m.module_id].scope.traits()); traits.extend(m.crate_def_map[m.module_id].scope.traits());
} }
@ -444,10 +447,12 @@ impl Scope {
f(name.clone(), ScopeDef::ModuleDef(*def)); f(name.clone(), ScopeDef::ModuleDef(*def));
}); });
if let Some(prelude) = m.crate_def_map.prelude() { if let Some(prelude) = m.crate_def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude.krate); let prelude_def_map = db.crate_def_map(prelude.krate());
prelude_def_map[prelude.module_id].scope.entries().for_each(|(name, res)| { prelude_def_map[prelude.id.module_id].scope.entries().for_each(
f(name.clone(), res.def.into()); |(name, res)| {
}); f(name.clone(), res.def.into());
},
);
} }
} }
Scope::GenericParams(gp) => { Scope::GenericParams(gp) => {
@ -456,10 +461,10 @@ impl Scope {
} }
} }
Scope::ImplBlockScope(i) => { Scope::ImplBlockScope(i) => {
f(SELF_TYPE, ScopeDef::ImplSelfType(*i)); f(name::SELF_TYPE, ScopeDef::ImplSelfType(*i));
} }
Scope::AdtScope(i) => { Scope::AdtScope(i) => {
f(SELF_TYPE, ScopeDef::AdtSelfType(*i)); f(name::SELF_TYPE, ScopeDef::AdtSelfType(*i));
} }
Scope::ExprScope(e) => { Scope::ExprScope(e) => {
e.expr_scopes.entries(e.scope_id).iter().for_each(|e| { e.expr_scopes.entries(e.scope_id).iter().for_each(|e| {

View file

@ -7,10 +7,12 @@
//! purely for "IDE needs". //! purely for "IDE needs".
use std::sync::Arc; use std::sync::Arc;
use hir_def::path::known;
use hir_expand::name::AsName;
use ra_db::FileId; use ra_db::FileId;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
AstPtr, match_ast, AstPtr,
SyntaxKind::*, SyntaxKind::*,
SyntaxNode, SyntaxNodePtr, TextRange, TextUnit, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
}; };
@ -24,11 +26,10 @@ use crate::{
BodySourceMap, BodySourceMap,
}, },
ids::LocationCtx, ids::LocationCtx,
path::known,
resolve::{ScopeDef, TypeNs, ValueNs}, resolve::{ScopeDef, TypeNs, ValueNs},
ty::method_resolution::implements_trait, ty::method_resolution::implements_trait,
AsName, Const, DefWithBody, Either, Enum, FromSource, Function, HasBody, HirFileId, MacroDef, Const, DefWithBody, Either, Enum, FromSource, Function, HasBody, HirFileId, MacroDef, Module,
Module, Name, Path, Resolver, Static, Struct, Ty, Name, Path, Resolver, Static, Struct, Ty,
}; };
fn try_get_resolver_for_node( fn try_get_resolver_for_node(
@ -36,24 +37,34 @@ fn try_get_resolver_for_node(
file_id: FileId, file_id: FileId,
node: &SyntaxNode, node: &SyntaxNode,
) -> Option<Resolver> { ) -> Option<Resolver> {
if let Some(module) = ast::Module::cast(node.clone()) { match_ast! {
let src = crate::Source { file_id: file_id.into(), ast: module }; match node {
Some(crate::Module::from_declaration(db, src)?.resolver(db)) ast::Module(it) => {
} else if let Some(file) = ast::SourceFile::cast(node.clone()) { let src = crate::Source { file_id: file_id.into(), ast: it };
let src = Some(crate::Module::from_declaration(db, src)?.resolver(db))
crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(file) }; },
Some(crate::Module::from_definition(db, src)?.resolver(db)) ast::SourceFile(it) => {
} else if let Some(s) = ast::StructDef::cast(node.clone()) { let src =
let src = crate::Source { file_id: file_id.into(), ast: s }; crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
Some(Struct::from_source(db, src)?.resolver(db)) Some(crate::Module::from_definition(db, src)?.resolver(db))
} else if let Some(e) = ast::EnumDef::cast(node.clone()) { },
let src = crate::Source { file_id: file_id.into(), ast: e }; ast::StructDef(it) => {
Some(Enum::from_source(db, src)?.resolver(db)) let src = crate::Source { file_id: file_id.into(), ast: it };
} else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { Some(Struct::from_source(db, src)?.resolver(db))
Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) },
} else { ast::EnumDef(it) => {
// FIXME add missing cases let src = crate::Source { file_id: file_id.into(), ast: it };
None Some(Enum::from_source(db, src)?.resolver(db))
},
_ => {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db))
} else {
// FIXME add missing cases
None
}
},
}
} }
} }
@ -64,19 +75,17 @@ fn def_with_body_from_child_node(
) -> Option<DefWithBody> { ) -> Option<DefWithBody> {
let src = crate::ModuleSource::from_child_node(db, file_id, node); let src = crate::ModuleSource::from_child_node(db, file_id, node);
let module = Module::from_definition(db, crate::Source { file_id: file_id.into(), ast: src })?; let module = Module::from_definition(db, crate::Source { file_id: file_id.into(), ast: src })?;
let ctx = LocationCtx::new(db, module, file_id.into()); let ctx = LocationCtx::new(db, module.id, file_id.into());
node.ancestors().find_map(|node| { node.ancestors().find_map(|node| {
if let Some(def) = ast::FnDef::cast(node.clone()) { match_ast! {
return Some(Function { id: ctx.to_def(&def) }.into()); match node {
ast::FnDef(def) => { Some(Function {id: ctx.to_def(&def) }.into()) },
ast::ConstDef(def) => { Some(Const { id: ctx.to_def(&def) }.into()) },
ast::StaticDef(def) => { Some(Static { id: ctx.to_def(&def) }.into()) },
_ => { None },
}
} }
if let Some(def) = ast::ConstDef::cast(node.clone()) {
return Some(Const { id: ctx.to_def(&def) }.into());
}
if let Some(def) = ast::StaticDef::cast(node) {
return Some(Static { id: ctx.to_def(&def) }.into());
}
None
}) })
} }

View file

@ -1,14 +1,15 @@
//! HIR for trait definitions. //! HIR for trait definitions.
use rustc_hash::FxHashMap;
use std::sync::Arc; use std::sync::Arc;
use hir_expand::name::AsName;
use ra_syntax::ast::{self, NameOwner}; use ra_syntax::ast::{self, NameOwner};
use rustc_hash::FxHashMap;
use crate::{ use crate::{
db::{AstDatabase, DefDatabase}, db::{AstDatabase, DefDatabase},
ids::LocationCtx, ids::LocationCtx,
name::AsName,
AssocItem, Const, Function, HasSource, Module, Name, Trait, TypeAlias, AssocItem, Const, Function, HasSource, Module, Name, Trait, TypeAlias,
}; };
@ -27,7 +28,7 @@ impl TraitData {
let src = tr.source(db); let src = tr.source(db);
let name = src.ast.name().map(|n| n.as_name()); let name = src.ast.name().map(|n| n.as_name());
let module = tr.module(db); let module = tr.module(db);
let ctx = LocationCtx::new(db, module, src.file_id); let ctx = LocationCtx::new(db, module.id, src.file_id);
let auto = src.ast.is_auto(); let auto = src.ast.is_auto();
let items = if let Some(item_list) = src.ast.item_list() { let items = if let Some(item_list) = src.ast.item_list() {
item_list item_list

View file

@ -17,8 +17,8 @@ use std::sync::Arc;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
use crate::{ use crate::{
db::HirDatabase, expr::ExprId, type_ref::Mutability, util::make_mut_slice, Adt, Crate, db::HirDatabase, expr::ExprId, util::make_mut_slice, Adt, Crate, DefWithBody, GenericParams,
DefWithBody, GenericParams, HasGenericParams, Name, Trait, TypeAlias, HasGenericParams, Mutability, Name, Trait, TypeAlias,
}; };
use display::{HirDisplay, HirFormatter}; use display::{HirDisplay, HirFormatter};

View file

@ -5,10 +5,11 @@
use std::iter::successors; use std::iter::successors;
use hir_expand::name;
use log::{info, warn}; use log::{info, warn};
use super::{traits::Solution, Canonical, Substs, Ty, TypeWalk}; use super::{traits::Solution, Canonical, Substs, Ty, TypeWalk};
use crate::{db::HirDatabase, name, HasGenericParams, Resolver}; use crate::{db::HirDatabase, HasGenericParams, Resolver};
const AUTODEREF_RECURSION_LIMIT: usize = 10; const AUTODEREF_RECURSION_LIMIT: usize = 10;

View file

@ -21,6 +21,11 @@ use std::sync::Arc;
use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue}; use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use hir_def::{
path::known,
type_ref::{Mutability, TypeRef},
};
use hir_expand::name;
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_prof::profile; use ra_prof::profile;
use test_utils::tested_by; use test_utils::tested_by;
@ -37,11 +42,8 @@ use crate::{
db::HirDatabase, db::HirDatabase,
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
expr::{BindingAnnotation, Body, ExprId, PatId}, expr::{BindingAnnotation, Body, ExprId, PatId},
name,
path::known,
resolve::{Resolver, TypeNs}, resolve::{Resolver, TypeNs},
ty::infer::diagnostics::InferenceDiagnostic, ty::infer::diagnostics::InferenceDiagnostic,
type_ref::{Mutability, TypeRef},
Adt, AssocItem, ConstData, DefWithBody, FnData, Function, HasBody, Path, StructField, Adt, AssocItem, ConstData, DefWithBody, FnData, Function, HasBody, Path, StructField,
}; };

View file

@ -14,8 +14,7 @@ use crate::{
lang_item::LangItemTarget, lang_item::LangItemTarget,
resolve::Resolver, resolve::Resolver,
ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk}, ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk},
type_ref::Mutability, Adt, Mutability,
Adt,
}; };
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a, D: HirDatabase> InferenceContext<'a, D> {

View file

@ -3,14 +3,15 @@
use std::iter::{repeat, repeat_with}; use std::iter::{repeat, repeat_with};
use std::sync::Arc; use std::sync::Arc;
use hir_def::path::{GenericArg, GenericArgs};
use hir_expand::name;
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
generics::{GenericParams, HasGenericParams}, generics::{GenericParams, HasGenericParams},
name,
nameres::Namespace, nameres::Namespace,
path::{GenericArg, GenericArgs},
ty::{ ty::{
autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Obligation, autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Obligation,
ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk,

View file

@ -1,5 +1,7 @@
//! Path expression resolution. //! Path expression resolution.
use hir_def::path::PathSegment;
use super::{ExprOrPatId, InferenceContext, TraitRef}; use super::{ExprOrPatId, InferenceContext, TraitRef};
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
@ -131,7 +133,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_trait_assoc_item( fn resolve_trait_assoc_item(
&mut self, &mut self,
trait_ref: TraitRef, trait_ref: TraitRef,
segment: &crate::path::PathSegment, segment: &PathSegment,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> { ) -> Option<(ValueNs, Option<Substs>)> {
let trait_ = trait_ref.trait_; let trait_ = trait_ref.trait_;
@ -170,7 +172,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn resolve_ty_assoc_item( fn resolve_ty_assoc_item(
&mut self, &mut self,
ty: Ty, ty: Ty,
segment: &crate::path::PathSegment, segment: &PathSegment,
id: ExprOrPatId, id: ExprOrPatId,
) -> Option<(ValueNs, Option<Substs>)> { ) -> Option<(ValueNs, Option<Substs>)> {
if let Ty::Unknown = ty { if let Ty::Unknown = ty {

View file

@ -8,6 +8,12 @@
use std::iter; use std::iter;
use std::sync::Arc; use std::sync::Arc;
use hir_def::{
builtin_type::BuiltinType,
path::{GenericArg, PathSegment},
type_ref::{TypeBound, TypeRef},
};
use super::{ use super::{
FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
TypeWalk, TypeWalk,
@ -18,13 +24,14 @@ use crate::{
generics::HasGenericParams, generics::HasGenericParams,
generics::{GenericDef, WherePredicate}, generics::{GenericDef, WherePredicate},
nameres::Namespace, nameres::Namespace,
path::{GenericArg, PathSegment},
resolve::{Resolver, TypeNs}, resolve::{Resolver, TypeNs},
ty::Adt, ty::{
type_ref::{TypeBound, TypeRef}, primitive::{FloatTy, IntTy},
Adt,
},
util::make_mut_slice, util::make_mut_slice,
BuiltinType, Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, Trait,
Trait, TypeAlias, Union, TypeAlias, Union,
}; };
impl Ty { impl Ty {
@ -640,8 +647,10 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
BuiltinType::Char => TypeCtor::Char, BuiltinType::Char => TypeCtor::Char,
BuiltinType::Bool => TypeCtor::Bool, BuiltinType::Bool => TypeCtor::Bool,
BuiltinType::Str => TypeCtor::Str, BuiltinType::Str => TypeCtor::Str,
BuiltinType::Int(ty) => TypeCtor::Int(ty.into()), BuiltinType::Int { signedness, bitness } => {
BuiltinType::Float(ty) => TypeCtor::Float(ty.into()), TypeCtor::Int(IntTy { signedness, bitness }.into())
}
BuiltinType::Float { bitness } => TypeCtor::Float(FloatTy { bitness }.into()),
}) })
} }

View file

@ -5,18 +5,17 @@
use std::sync::Arc; use std::sync::Arc;
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use hir_def::CrateModuleId;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef}; use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef};
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
impl_block::{ImplBlock, ImplId}, impl_block::{ImplBlock, ImplId},
nameres::CrateModuleId,
resolve::Resolver, resolve::Resolver,
ty::primitive::{FloatBitness, UncertainFloatTy, UncertainIntTy}, ty::primitive::{FloatBitness, UncertainFloatTy, UncertainIntTy},
ty::{Ty, TypeCtor}, ty::{Ty, TypeCtor},
type_ref::Mutability, AssocItem, Crate, Function, Module, Mutability, Name, Trait,
AssocItem, Crate, Function, Module, Name, Trait,
}; };
/// This is used as a key for indexing impls. /// This is used as a key for indexing impls.
@ -50,7 +49,7 @@ impl CrateImplBlocks {
let fingerprint = TyFingerprint::for_impl(ty); let fingerprint = TyFingerprint::for_impl(ty);
fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flat_map(|i| i.iter()).map( fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| { move |(module_id, impl_id)| {
let module = Module { krate: self.krate, module_id: *module_id }; let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id) ImplBlock::from_id(module, *impl_id)
}, },
) )
@ -62,7 +61,7 @@ impl CrateImplBlocks {
) -> impl Iterator<Item = ImplBlock> + 'a { ) -> impl Iterator<Item = ImplBlock> + 'a {
self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map( self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| { move |(module_id, impl_id)| {
let module = Module { krate: self.krate, module_id: *module_id }; let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id) ImplBlock::from_id(module, *impl_id)
}, },
) )
@ -71,7 +70,7 @@ impl CrateImplBlocks {
pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplBlock> + 'a { pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplBlock> + 'a {
self.impls.values().chain(self.impls_by_trait.values()).flat_map(|i| i.iter()).map( self.impls.values().chain(self.impls_by_trait.values()).flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| { move |(module_id, impl_id)| {
let module = Module { krate: self.krate, module_id: *module_id }; let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id) ImplBlock::from_id(module, *impl_id)
}, },
) )
@ -90,14 +89,14 @@ impl CrateImplBlocks {
self.impls_by_trait self.impls_by_trait
.entry(tr.trait_) .entry(tr.trait_)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push((module.module_id, impl_id)); .push((module.id.module_id, impl_id));
} }
} else { } else {
if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) { if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
self.impls self.impls
.entry(target_ty_fp) .entry(target_ty_fp)
.or_insert_with(Vec::new) .or_insert_with(Vec::new)
.push((module.module_id, impl_id)); .push((module.id.module_id, impl_id));
} }
} }
} }

View file

@ -2,27 +2,7 @@
use std::fmt; use std::fmt;
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub use hir_def::builtin_type::{FloatBitness, IntBitness, Signedness};
pub enum Signedness {
Signed,
Unsigned,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum IntBitness {
Xsize,
X8,
X16,
X32,
X64,
X128,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum FloatBitness {
X32,
X64,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum UncertainIntTy { pub enum UncertainIntTy {

View file

@ -1,7 +1,7 @@
//! Trait solving using Chalk. //! Trait solving using Chalk.
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use chalk_ir::cast::Cast; use chalk_ir::{cast::Cast, family::ChalkIr};
use log::debug; use log::debug;
use ra_db::salsa; use ra_db::salsa;
use ra_prof::profile; use ra_prof::profile;
@ -33,7 +33,7 @@ impl TraitSolver {
fn solve( fn solve(
&self, &self,
db: &impl HirDatabase, db: &impl HirDatabase,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal>>, goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<ChalkIr>>>,
) -> Option<chalk_solve::Solution> { ) -> Option<chalk_solve::Solution> {
let context = ChalkContext { db, krate: self.krate }; let context = ChalkContext { db, krate: self.krate };
debug!("solve goal: {:?}", goal); debug!("solve goal: {:?}", goal);
@ -196,7 +196,7 @@ pub(crate) fn trait_solve_query(
} }
fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution { fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution {
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution>| { let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<ChalkIr>>| {
let value = subst let value = subst
.value .value
.parameters .parameters

View file

@ -4,11 +4,13 @@ use std::sync::Arc;
use log::debug; use log::debug;
use chalk_ir::{ use chalk_ir::{
cast::Cast, Identifier, ImplId, Parameter, PlaceholderIndex, TypeId, TypeKindId, TypeName, cast::Cast, family::ChalkIr, Identifier, ImplId, Parameter, PlaceholderIndex, TypeId,
UniverseIndex, TypeKindId, TypeName, UniverseIndex,
}; };
use chalk_rust_ir::{AssociatedTyDatum, ImplDatum, StructDatum, TraitDatum}; use chalk_rust_ir::{AssociatedTyDatum, ImplDatum, StructDatum, TraitDatum};
use hir_expand::name;
use ra_db::salsa::{InternId, InternKey}; use ra_db::salsa::{InternId, InternKey};
use super::{Canonical, ChalkContext, Impl, Obligation}; use super::{Canonical, ChalkContext, Impl, Obligation};
@ -38,8 +40,8 @@ where
} }
impl ToChalk for Ty { impl ToChalk for Ty {
type Chalk = chalk_ir::Ty; type Chalk = chalk_ir::Ty<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Ty { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Ty<ChalkIr> {
match self { match self {
Ty::Apply(apply_ty) => { Ty::Apply(apply_ty) => {
let name = match apply_ty.ctor { let name = match apply_ty.ctor {
@ -62,21 +64,19 @@ impl ToChalk for Ty {
chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast() chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast()
} }
Ty::Param { idx, .. } => { Ty::Param { idx, .. } => {
PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize }.to_ty() PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize }.to_ty::<ChalkIr>()
} }
Ty::Bound(idx) => chalk_ir::Ty::BoundVar(idx as usize), Ty::Bound(idx) => chalk_ir::Ty::BoundVar(idx as usize),
Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"), Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
// FIXME this is clearly incorrect, but probably not too incorrect // FIXME use Chalk's Dyn/Opaque once the bugs with that are fixed
// and I'm not sure what to actually do with Ty::Unknown
// maybe an alternative would be `for<T> T`? (meaningless in rust, but expressible in chalk's Ty)
//
// FIXME also dyn and impl Trait are currently handled like Unknown because Chalk doesn't have them yet
Ty::Unknown | Ty::Dyn(_) | Ty::Opaque(_) => { Ty::Unknown | Ty::Dyn(_) | Ty::Opaque(_) => {
PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::max_value() }.to_ty() let parameters = Vec::new();
let name = TypeName::Error;
chalk_ir::ApplicationTy { name, parameters }.cast()
} }
} }
} }
fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty) -> Self { fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty<ChalkIr>) -> Self {
match chalk { match chalk {
chalk_ir::Ty::Apply(apply_ty) => { chalk_ir::Ty::Apply(apply_ty) => {
// FIXME this is kind of hacky due to the fact that // FIXME this is kind of hacky due to the fact that
@ -92,6 +92,7 @@ impl ToChalk for Ty {
let parameters = from_chalk(db, apply_ty.parameters); let parameters = from_chalk(db, apply_ty.parameters);
Ty::Apply(ApplicationTy { ctor, parameters }) Ty::Apply(ApplicationTy { ctor, parameters })
} }
TypeName::Error => Ty::Unknown,
// FIXME handle TypeKindId::Trait/Type here // FIXME handle TypeKindId::Trait/Type here
TypeName::TypeKindId(_) => unimplemented!(), TypeName::TypeKindId(_) => unimplemented!(),
TypeName::Placeholder(idx) => { TypeName::Placeholder(idx) => {
@ -108,18 +109,30 @@ impl ToChalk for Ty {
chalk_ir::Ty::ForAll(_) => unimplemented!(), chalk_ir::Ty::ForAll(_) => unimplemented!(),
chalk_ir::Ty::BoundVar(idx) => Ty::Bound(idx as u32), chalk_ir::Ty::BoundVar(idx) => Ty::Bound(idx as u32),
chalk_ir::Ty::InferenceVar(_iv) => Ty::Unknown, chalk_ir::Ty::InferenceVar(_iv) => Ty::Unknown,
chalk_ir::Ty::Dyn(where_clauses) => {
assert_eq!(where_clauses.binders.len(), 1);
let predicates =
where_clauses.value.into_iter().map(|c| from_chalk(db, c)).collect();
Ty::Dyn(predicates)
}
chalk_ir::Ty::Opaque(where_clauses) => {
assert_eq!(where_clauses.binders.len(), 1);
let predicates =
where_clauses.value.into_iter().map(|c| from_chalk(db, c)).collect();
Ty::Opaque(predicates)
}
} }
} }
} }
impl ToChalk for Substs { impl ToChalk for Substs {
type Chalk = Vec<chalk_ir::Parameter>; type Chalk = Vec<chalk_ir::Parameter<ChalkIr>>;
fn to_chalk(self, db: &impl HirDatabase) -> Vec<Parameter> { fn to_chalk(self, db: &impl HirDatabase) -> Vec<Parameter<ChalkIr>> {
self.iter().map(|ty| ty.clone().to_chalk(db).cast()).collect() self.iter().map(|ty| ty.clone().to_chalk(db).cast()).collect()
} }
fn from_chalk(db: &impl HirDatabase, parameters: Vec<chalk_ir::Parameter>) -> Substs { fn from_chalk(db: &impl HirDatabase, parameters: Vec<chalk_ir::Parameter<ChalkIr>>) -> Substs {
let tys = parameters let tys = parameters
.into_iter() .into_iter()
.map(|p| match p { .map(|p| match p {
@ -132,15 +145,15 @@ impl ToChalk for Substs {
} }
impl ToChalk for TraitRef { impl ToChalk for TraitRef {
type Chalk = chalk_ir::TraitRef; type Chalk = chalk_ir::TraitRef<ChalkIr>;
fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef { fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<ChalkIr> {
let trait_id = self.trait_.to_chalk(db); let trait_id = self.trait_.to_chalk(db);
let parameters = self.substs.to_chalk(db); let parameters = self.substs.to_chalk(db);
chalk_ir::TraitRef { trait_id, parameters } chalk_ir::TraitRef { trait_id, parameters }
} }
fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef) -> Self { fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<ChalkIr>) -> Self {
let trait_ = from_chalk(db, trait_ref.trait_id); let trait_ = from_chalk(db, trait_ref.trait_id);
let substs = from_chalk(db, trait_ref.parameters); let substs = from_chalk(db, trait_ref.parameters);
TraitRef { trait_, substs } TraitRef { trait_, substs }
@ -151,11 +164,11 @@ impl ToChalk for Trait {
type Chalk = chalk_ir::TraitId; type Chalk = chalk_ir::TraitId;
fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TraitId { fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TraitId {
self.id.into() chalk_ir::TraitId(id_to_chalk(self.id))
} }
fn from_chalk(_db: &impl HirDatabase, trait_id: chalk_ir::TraitId) -> Trait { fn from_chalk(_db: &impl HirDatabase, trait_id: chalk_ir::TraitId) -> Trait {
Trait { id: trait_id.into() } Trait { id: id_from_chalk(trait_id.0) }
} }
} }
@ -187,18 +200,18 @@ impl ToChalk for TypeAlias {
type Chalk = chalk_ir::TypeId; type Chalk = chalk_ir::TypeId;
fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TypeId { fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TypeId {
self.id.into() chalk_ir::TypeId(id_to_chalk(self.id))
} }
fn from_chalk(_db: &impl HirDatabase, impl_id: chalk_ir::TypeId) -> TypeAlias { fn from_chalk(_db: &impl HirDatabase, type_alias_id: chalk_ir::TypeId) -> TypeAlias {
TypeAlias { id: impl_id.into() } TypeAlias { id: id_from_chalk(type_alias_id.0) }
} }
} }
impl ToChalk for GenericPredicate { impl ToChalk for GenericPredicate {
type Chalk = chalk_ir::QuantifiedWhereClause; type Chalk = chalk_ir::QuantifiedWhereClause<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::QuantifiedWhereClause<ChalkIr> {
match self { match self {
GenericPredicate::Implemented(trait_ref) => { GenericPredicate::Implemented(trait_ref) => {
make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0)
@ -221,25 +234,40 @@ impl ToChalk for GenericPredicate {
} }
fn from_chalk( fn from_chalk(
_db: &impl HirDatabase, db: &impl HirDatabase,
_where_clause: chalk_ir::QuantifiedWhereClause, where_clause: chalk_ir::QuantifiedWhereClause<ChalkIr>,
) -> GenericPredicate { ) -> GenericPredicate {
// This should never need to be called match where_clause.value {
unimplemented!() chalk_ir::WhereClause::Implemented(tr) => {
if tr.trait_id == UNKNOWN_TRAIT {
// FIXME we need an Error enum on the Chalk side to avoid this
return GenericPredicate::Error;
}
GenericPredicate::Implemented(from_chalk(db, tr))
}
chalk_ir::WhereClause::ProjectionEq(projection_eq) => {
let projection_ty = from_chalk(db, projection_eq.projection);
let ty = from_chalk(db, projection_eq.ty);
GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty })
}
}
} }
} }
impl ToChalk for ProjectionTy { impl ToChalk for ProjectionTy {
type Chalk = chalk_ir::ProjectionTy; type Chalk = chalk_ir::ProjectionTy<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy<ChalkIr> {
chalk_ir::ProjectionTy { chalk_ir::ProjectionTy {
associated_ty_id: self.associated_ty.to_chalk(db), associated_ty_id: self.associated_ty.to_chalk(db),
parameters: self.parameters.to_chalk(db), parameters: self.parameters.to_chalk(db),
} }
} }
fn from_chalk(db: &impl HirDatabase, projection_ty: chalk_ir::ProjectionTy) -> ProjectionTy { fn from_chalk(
db: &impl HirDatabase,
projection_ty: chalk_ir::ProjectionTy<ChalkIr>,
) -> ProjectionTy {
ProjectionTy { ProjectionTy {
associated_ty: from_chalk(db, projection_ty.associated_ty_id), associated_ty: from_chalk(db, projection_ty.associated_ty_id),
parameters: from_chalk(db, projection_ty.parameters), parameters: from_chalk(db, projection_ty.parameters),
@ -248,31 +276,31 @@ impl ToChalk for ProjectionTy {
} }
impl ToChalk for super::ProjectionPredicate { impl ToChalk for super::ProjectionPredicate {
type Chalk = chalk_ir::Normalize; type Chalk = chalk_ir::Normalize<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<ChalkIr> {
chalk_ir::Normalize { chalk_ir::Normalize {
projection: self.projection_ty.to_chalk(db), projection: self.projection_ty.to_chalk(db),
ty: self.ty.to_chalk(db), ty: self.ty.to_chalk(db),
} }
} }
fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize) -> Self { fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<ChalkIr>) -> Self {
unimplemented!() unimplemented!()
} }
} }
impl ToChalk for Obligation { impl ToChalk for Obligation {
type Chalk = chalk_ir::DomainGoal; type Chalk = chalk_ir::DomainGoal<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::DomainGoal { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::DomainGoal<ChalkIr> {
match self { match self {
Obligation::Trait(tr) => tr.to_chalk(db).cast(), Obligation::Trait(tr) => tr.to_chalk(db).cast(),
Obligation::Projection(pr) => pr.to_chalk(db).cast(), Obligation::Projection(pr) => pr.to_chalk(db).cast(),
} }
} }
fn from_chalk(_db: &impl HirDatabase, _goal: chalk_ir::DomainGoal) -> Self { fn from_chalk(_db: &impl HirDatabase, _goal: chalk_ir::DomainGoal<ChalkIr>) -> Self {
unimplemented!() unimplemented!()
} }
} }
@ -296,16 +324,16 @@ where
} }
impl ToChalk for Arc<super::TraitEnvironment> { impl ToChalk for Arc<super::TraitEnvironment> {
type Chalk = Arc<chalk_ir::Environment>; type Chalk = chalk_ir::Environment<ChalkIr>;
fn to_chalk(self, db: &impl HirDatabase) -> Arc<chalk_ir::Environment> { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Environment<ChalkIr> {
let mut clauses = Vec::new(); let mut clauses = Vec::new();
for pred in &self.predicates { for pred in &self.predicates {
if pred.is_error() { if pred.is_error() {
// for env, we just ignore errors // for env, we just ignore errors
continue; continue;
} }
let program_clause: chalk_ir::ProgramClause = pred.clone().to_chalk(db).cast(); let program_clause: chalk_ir::ProgramClause<ChalkIr> = pred.clone().to_chalk(db).cast();
clauses.push(program_clause.into_from_env_clause()); clauses.push(program_clause.into_from_env_clause());
} }
chalk_ir::Environment::new().add_clauses(clauses) chalk_ir::Environment::new().add_clauses(clauses)
@ -313,13 +341,16 @@ impl ToChalk for Arc<super::TraitEnvironment> {
fn from_chalk( fn from_chalk(
_db: &impl HirDatabase, _db: &impl HirDatabase,
_env: Arc<chalk_ir::Environment>, _env: chalk_ir::Environment<ChalkIr>,
) -> Arc<super::TraitEnvironment> { ) -> Arc<super::TraitEnvironment> {
unimplemented!() unimplemented!()
} }
} }
impl<T: ToChalk> ToChalk for super::InEnvironment<T> { impl<T: ToChalk> ToChalk for super::InEnvironment<T>
where
T::Chalk: chalk_ir::family::HasTypeFamily<TypeFamily = ChalkIr>,
{
type Chalk = chalk_ir::InEnvironment<T::Chalk>; type Chalk = chalk_ir::InEnvironment<T::Chalk>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> {
@ -351,7 +382,7 @@ fn convert_where_clauses(
db: &impl HirDatabase, db: &impl HirDatabase,
def: GenericDef, def: GenericDef,
substs: &Substs, substs: &Substs,
) -> Vec<chalk_ir::QuantifiedWhereClause> { ) -> Vec<chalk_ir::QuantifiedWhereClause<ChalkIr>> {
let generic_predicates = db.generic_predicates(def); let generic_predicates = db.generic_predicates(def);
let mut result = Vec::with_capacity(generic_predicates.len()); let mut result = Vec::with_capacity(generic_predicates.len());
for pred in generic_predicates.iter() { for pred in generic_predicates.iter() {
@ -384,7 +415,7 @@ where
fn impls_for_trait( fn impls_for_trait(
&self, &self,
trait_id: chalk_ir::TraitId, trait_id: chalk_ir::TraitId,
parameters: &[Parameter], parameters: &[Parameter<ChalkIr>],
) -> Vec<ImplId> { ) -> Vec<ImplId> {
debug!("impls_for_trait {:?}", trait_id); debug!("impls_for_trait {:?}", trait_id);
if trait_id == UNKNOWN_TRAIT { if trait_id == UNKNOWN_TRAIT {
@ -430,13 +461,13 @@ where
} }
fn split_projection<'p>( fn split_projection<'p>(
&self, &self,
projection: &'p chalk_ir::ProjectionTy, projection: &'p chalk_ir::ProjectionTy<ChalkIr>,
) -> (Arc<AssociatedTyDatum>, &'p [Parameter], &'p [Parameter]) { ) -> (Arc<AssociatedTyDatum>, &'p [Parameter<ChalkIr>], &'p [Parameter<ChalkIr>]) {
debug!("split_projection {:?}", projection); debug!("split_projection {:?}", projection);
// we don't support GATs, so I think this should always be correct currently // we don't support GATs, so I think this should always be correct currently
(self.db.associated_ty_data(projection.associated_ty_id), &projection.parameters, &[]) (self.db.associated_ty_data(projection.associated_ty_id), &projection.parameters, &[])
} }
fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause> { fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<ChalkIr>> {
vec![] vec![]
} }
fn local_impls_to_coherence_check( fn local_impls_to_coherence_check(
@ -508,7 +539,7 @@ pub(crate) fn trait_datum_query(
let trait_ref = trait_.trait_ref(db).subst(&bound_vars).to_chalk(db); let trait_ref = trait_.trait_ref(db).subst(&bound_vars).to_chalk(db);
let flags = chalk_rust_ir::TraitFlags { let flags = chalk_rust_ir::TraitFlags {
auto: trait_.is_auto(db), auto: trait_.is_auto(db),
upstream: trait_.module(db).krate(db) != Some(krate), upstream: trait_.module(db).krate() != krate,
non_enumerable: true, non_enumerable: true,
// FIXME set these flags correctly // FIXME set these flags correctly
marker: false, marker: false,
@ -596,7 +627,7 @@ fn impl_block_datum(
.target_trait_ref(db) .target_trait_ref(db)
.expect("FIXME handle unresolved impl block trait ref") .expect("FIXME handle unresolved impl block trait ref")
.subst(&bound_vars); .subst(&bound_vars);
let impl_type = if impl_block.module().krate(db) == Some(krate) { let impl_type = if impl_block.module().krate() == krate {
chalk_rust_ir::ImplType::Local chalk_rust_ir::ImplType::Local
} else { } else {
chalk_rust_ir::ImplType::External chalk_rust_ir::ImplType::External
@ -705,7 +736,7 @@ fn closure_fn_trait_impl_datum(
substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(), substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(),
}; };
let output_ty_id = fn_once_trait.associated_type_by_name(db, &crate::name::OUTPUT_TYPE)?; let output_ty_id = fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?;
let output_ty_value = chalk_rust_ir::AssociatedTyValue { let output_ty_value = chalk_rust_ir::AssociatedTyValue {
associated_ty_id: output_ty_id.to_chalk(db), associated_ty_id: output_ty_id.to_chalk(db),
@ -746,30 +777,6 @@ fn id_to_chalk<T: InternKey>(salsa_id: T) -> chalk_ir::RawId {
chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() } chalk_ir::RawId { index: salsa_id.as_intern_id().as_u32() }
} }
impl From<chalk_ir::TraitId> for crate::ids::TraitId {
fn from(trait_id: chalk_ir::TraitId) -> Self {
id_from_chalk(trait_id.0)
}
}
impl From<crate::ids::TraitId> for chalk_ir::TraitId {
fn from(trait_id: crate::ids::TraitId) -> Self {
chalk_ir::TraitId(id_to_chalk(trait_id))
}
}
impl From<chalk_ir::TypeId> for crate::ids::TypeAliasId {
fn from(type_id: chalk_ir::TypeId) -> Self {
id_from_chalk(type_id.0)
}
}
impl From<crate::ids::TypeAliasId> for chalk_ir::TypeId {
fn from(type_id: crate::ids::TypeAliasId) -> Self {
chalk_ir::TypeId(id_to_chalk(type_id))
}
}
impl From<chalk_ir::StructId> for crate::ids::TypeCtorId { impl From<chalk_ir::StructId> for crate::ids::TypeCtorId {
fn from(struct_id: chalk_ir::StructId) -> Self { fn from(struct_id: chalk_ir::StructId) -> Self {
id_from_chalk(struct_id.0) id_from_chalk(struct_id.0)

View file

@ -2,12 +2,13 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::type_ref::TypeRef;
use hir_expand::name::{AsName, Name};
use ra_syntax::ast::NameOwner; use ra_syntax::ast::NameOwner;
use crate::{ use crate::{
db::{AstDatabase, DefDatabase}, db::{AstDatabase, DefDatabase},
name::{AsName, Name},
type_ref::TypeRef,
HasSource, TypeAlias, HasSource, TypeAlias,
}; };

View file

@ -0,0 +1,21 @@
[package]
edition = "2018"
name = "ra_hir_def"
version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
log = "0.4.5"
once_cell = "1.0.1"
relative-path = "1.0.0"
rustc-hash = "1.0"
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_prof = { path = "../ra_prof" }
hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" }
test_utils = { path = "../test_utils" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }
ra_cfg = { path = "../ra_cfg" }
tt = { path = "../ra_tt", package = "ra_tt" }

View file

@ -2,6 +2,7 @@
use std::sync::Arc; use std::sync::Arc;
use hir_expand::hygiene::Hygiene;
use mbe::ast_to_token_tree; use mbe::ast_to_token_tree;
use ra_cfg::CfgOptions; use ra_cfg::CfgOptions;
use ra_syntax::{ use ra_syntax::{
@ -10,10 +11,10 @@ use ra_syntax::{
}; };
use tt::Subtree; use tt::Subtree;
use crate::{db::AstDatabase, path::Path, HirFileId, Source}; use crate::path::Path;
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Attr { pub struct Attr {
pub(crate) path: Path, pub(crate) path: Path,
pub(crate) input: Option<AttrInput>, pub(crate) input: Option<AttrInput>,
} }
@ -25,11 +26,8 @@ pub enum AttrInput {
} }
impl Attr { impl Attr {
pub(crate) fn from_src( pub(crate) fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> {
Source { file_id, ast }: Source<ast::Attr>, let path = Path::from_src(ast.path()?, hygiene)?;
db: &impl AstDatabase,
) -> Option<Attr> {
let path = Path::from_src(Source { file_id, ast: ast.path()? }, db)?;
let input = match ast.input() { let input = match ast.input() {
None => None, None => None,
Some(ast::AttrInput::Literal(lit)) => { Some(ast::AttrInput::Literal(lit)) => {
@ -45,26 +43,22 @@ impl Attr {
Some(Attr { path, input }) Some(Attr { path, input })
} }
pub(crate) fn from_attrs_owner( pub fn from_attrs_owner(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Option<Arc<[Attr]>> {
file_id: HirFileId,
owner: &dyn AttrsOwner,
db: &impl AstDatabase,
) -> Option<Arc<[Attr]>> {
let mut attrs = owner.attrs().peekable(); let mut attrs = owner.attrs().peekable();
if attrs.peek().is_none() { if attrs.peek().is_none() {
// Avoid heap allocation // Avoid heap allocation
return None; return None;
} }
Some(attrs.flat_map(|ast| Attr::from_src(Source { file_id, ast }, db)).collect()) Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).collect())
} }
pub(crate) fn is_simple_atom(&self, name: &str) -> bool { pub fn is_simple_atom(&self, name: &str) -> bool {
// FIXME: Avoid cloning // FIXME: Avoid cloning
self.path.as_ident().map_or(false, |s| s.to_string() == name) self.path.as_ident().map_or(false, |s| s.to_string() == name)
} }
// FIXME: handle cfg_attr :-) // FIXME: handle cfg_attr :-)
pub(crate) fn as_cfg(&self) -> Option<&Subtree> { pub fn as_cfg(&self) -> Option<&Subtree> {
if !self.is_simple_atom("cfg") { if !self.is_simple_atom("cfg") {
return None; return None;
} }
@ -74,7 +68,7 @@ impl Attr {
} }
} }
pub(crate) fn as_path(&self) -> Option<&SmolStr> { pub fn as_path(&self) -> Option<&SmolStr> {
if !self.is_simple_atom("path") { if !self.is_simple_atom("path") {
return None; return None;
} }
@ -84,7 +78,7 @@ impl Attr {
} }
} }
pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Option<bool> { pub fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> Option<bool> {
cfg_options.is_cfg_enabled(self.as_cfg()?) cfg_options.is_cfg_enabled(self.as_cfg()?)
} }
} }

View file

@ -0,0 +1,63 @@
//! This module defines built-in types.
//!
//! A peculiarity of built-in types is that they are always available and are
//! not associated with any particular crate.
use hir_expand::name::{self, Name};
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Signedness {
Signed,
Unsigned,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum IntBitness {
Xsize,
X8,
X16,
X32,
X64,
X128,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum FloatBitness {
X32,
X64,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinType {
Char,
Bool,
Str,
Int { signedness: Signedness, bitness: IntBitness },
Float { bitness: FloatBitness },
}
impl BuiltinType {
#[rustfmt::skip]
pub const ALL: &'static [(Name, BuiltinType)] = &[
(name::CHAR, BuiltinType::Char),
(name::BOOL, BuiltinType::Bool),
(name::STR, BuiltinType::Str ),
(name::ISIZE, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::Xsize }),
(name::I8, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X8 }),
(name::I16, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X16 }),
(name::I32, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X32 }),
(name::I64, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X64 }),
(name::I128, BuiltinType::Int { signedness: Signedness::Signed, bitness: IntBitness::X128 }),
(name::USIZE, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize }),
(name::U8, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X8 }),
(name::U16, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X16 }),
(name::U32, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X32 }),
(name::U64, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X64 }),
(name::U128, BuiltinType::Int { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }),
(name::F32, BuiltinType::Float { bitness: FloatBitness::X32 }),
(name::F64, BuiltinType::Float { bitness: FloatBitness::X64 }),
];
}

View file

@ -0,0 +1,40 @@
//! Defines database & queries for name resolution.
use std::sync::Arc;
use hir_expand::{db::AstDatabase, HirFileId};
use ra_db::{salsa, SourceDatabase};
use ra_syntax::ast;
use crate::nameres::raw::{ImportSourceMap, RawItems};
/// Database trait that interns item locations.
///
/// Each `#[salsa::interned]` query maps an `ItemLoc<_>` to a small, stable
/// integer ID; salsa also generates the inverse `lookup_intern_*` queries,
/// which the `AstItemDef` impls in `lib.rs` rely on.
#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
    #[salsa::interned]
    fn intern_function(&self, loc: crate::ItemLoc<ast::FnDef>) -> crate::FunctionId;
    #[salsa::interned]
    fn intern_struct(&self, loc: crate::ItemLoc<ast::StructDef>) -> crate::StructId;
    // NOTE: unions are parsed as `ast::StructDef` (same surface syntax), so
    // `intern_union` takes the same AST node type as `intern_struct`.
    #[salsa::interned]
    fn intern_union(&self, loc: crate::ItemLoc<ast::StructDef>) -> crate::UnionId;
    #[salsa::interned]
    fn intern_enum(&self, loc: crate::ItemLoc<ast::EnumDef>) -> crate::EnumId;
    #[salsa::interned]
    fn intern_const(&self, loc: crate::ItemLoc<ast::ConstDef>) -> crate::ConstId;
    #[salsa::interned]
    fn intern_static(&self, loc: crate::ItemLoc<ast::StaticDef>) -> crate::StaticId;
    #[salsa::interned]
    fn intern_trait(&self, loc: crate::ItemLoc<ast::TraitDef>) -> crate::TraitId;
    #[salsa::interned]
    fn intern_type_alias(&self, loc: crate::ItemLoc<ast::TypeAliasDef>) -> crate::TypeAliasId;
}
/// Queries for the name-resolution layer, built on top of interning
/// (`InternDatabase`) and macro expansion (`AstDatabase`).
#[salsa::query_group(DefDatabase2Storage)]
pub trait DefDatabase2: InternDatabase + AstDatabase {
    /// Collects the top-level items of a file together with a source map
    /// from imports back to their syntax pointers (see `nameres::raw`).
    #[salsa::invoke(RawItems::raw_items_with_source_map_query)]
    fn raw_items_with_source_map(
        &self,
        file_id: HirFileId,
    ) -> (Arc<RawItems>, Arc<ImportSourceMap>);
    /// Same data as `raw_items_with_source_map`, with the source map dropped;
    /// use this when only the items are needed so the query stays cheap.
    #[salsa::invoke(RawItems::raw_items_query)]
    fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
}

View file

@ -0,0 +1,362 @@
//! `hir_def` crate contains everything between macro expansion and type
//! inference.
//!
//! It defines various items (structs, enums, traits) which comprises Rust code,
//! as well as an algorithm for resolving paths to such entities.
//!
//! Note that `hir_def` is a work in progress, so not all of the above is
//! actually true.
pub mod db;
pub mod attr;
pub mod path;
pub mod type_ref;
pub mod builtin_type;
// FIXME: this should be private
pub mod nameres;
use std::hash::{Hash, Hasher};
use hir_expand::{ast_id_map::FileAstId, db::AstDatabase, AstId, HirFileId};
use ra_arena::{impl_arena_id, RawId};
use ra_db::{salsa, CrateId, FileId};
use ra_syntax::{ast, AstNode, SyntaxNode};
use crate::{builtin_type::BuiltinType, db::InternDatabase};
/// A value paired with the `HirFileId` it originates from, so consumers can
/// map it back to concrete syntax.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct Source<T> {
    /// The (possibly macro-expanded) file this value came from.
    pub file_id: HirFileId,
    /// The wrapped value; in this crate typically an AST node or pointer.
    pub ast: T,
}
/// The syntax that defines a module: either a whole source file, or an
/// inline `mod foo { ... }` item.
pub enum ModuleSource {
    SourceFile(ast::SourceFile),
    Module(ast::Module),
}
impl ModuleSource {
    /// Resolves module syntax from either a source file or an inline `mod`
    /// declaration. `file_id` takes precedence when both are supplied.
    ///
    /// # Panics
    /// Panics when both arguments are `None`, or when `decl_id` points at a
    /// `mod foo;` declaration without a body.
    pub fn new(
        db: &impl db::DefDatabase2,
        file_id: Option<FileId>,
        decl_id: Option<AstId<ast::Module>>,
    ) -> ModuleSource {
        match (file_id, decl_id) {
            (Some(file_id), _) => {
                let source_file = db.parse(file_id).tree();
                ModuleSource::SourceFile(source_file)
            }
            (None, Some(item_id)) => {
                let module = item_id.to_node(db);
                assert!(module.item_list().is_some(), "expected inline module");
                ModuleSource::Module(module)
            }
            // Was a bare `panic!()`; give the caller an actionable message.
            (None, None) => panic!("ModuleSource::new requires a file_id or a decl_id"),
        }
    }

    // FIXME: these methods do not belong here
    /// Returns the innermost inline module (one with a body, not `mod foo;`)
    /// containing `position`, falling back to the whole source file.
    pub fn from_position(
        db: &impl db::DefDatabase2,
        position: ra_db::FilePosition,
    ) -> ModuleSource {
        let parse = db.parse(position.file_id);
        match &ra_syntax::algo::find_node_at_offset::<ast::Module>(
            parse.tree().syntax(),
            position.offset,
        ) {
            Some(m) if !m.has_semi() => ModuleSource::Module(m.clone()),
            _ => {
                let source_file = parse.tree();
                ModuleSource::SourceFile(source_file)
            }
        }
    }

    /// Walks `child`'s ancestors looking for an enclosing inline module;
    /// falls back to the file's root when none is found.
    pub fn from_child_node(
        db: &impl db::DefDatabase2,
        file_id: FileId,
        child: &SyntaxNode,
    ) -> ModuleSource {
        if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
            ModuleSource::Module(m)
        } else {
            let source_file = db.parse(file_id).tree();
            ModuleSource::SourceFile(source_file)
        }
    }

    /// Wraps the parse tree of `file_id` as a module source.
    pub fn from_file_id(db: &impl db::DefDatabase2, file_id: FileId) -> ModuleSource {
        let source_file = db.parse(file_id).tree();
        ModuleSource::SourceFile(source_file)
    }
}
impl<T> Source<T> {
    /// Transforms the wrapped value while preserving its originating file.
    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
        let Source { file_id, ast } = self;
        let ast = f(ast);
        Source { file_id, ast }
    }
    /// Returns the root syntax node of the file this value came from.
    pub fn file_syntax(&self, db: &impl AstDatabase) -> SyntaxNode {
        db.parse_or_expand(self.file_id).expect("source created from invalid file")
    }
}
/// Globally unique module ID: the owning crate plus the module's
/// crate-local ID.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleId {
    pub krate: CrateId,
    pub module_id: CrateModuleId,
}
/// An ID of a module, **local** to a specific crate
// FIXME: rename to `LocalModuleId`.
// Allocated from a per-crate arena (hence `impl_arena_id!`); only meaningful
// together with a `CrateId`, see `ModuleId`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateModuleId(RawId);
impl_arena_id!(CrateModuleId);
/// Implements `salsa::InternKey` for a newtype wrapping `salsa::InternId`,
/// i.e. the to/from conversions salsa needs to use the type as an intern key.
macro_rules! impl_intern_key {
    ($name:ident) => {
        impl salsa::InternKey for $name {
            fn from_intern_id(v: salsa::InternId) -> Self {
                $name(v)
            }
            fn as_intern_id(&self) -> salsa::InternId {
                self.0
            }
        }
    };
}
/// The canonical "location" of an item: the module containing it plus the
/// stable AST ID of its syntax node. Interning an `ItemLoc` yields the
/// item's `*Id`.
#[derive(Debug)]
pub struct ItemLoc<N: AstNode> {
    pub(crate) module: ModuleId,
    ast_id: AstId<N>,
}
// These impls are hand-written because `#[derive(...)]` would add spurious
// `N: PartialEq`/`N: Hash`/`N: Clone` bounds on the type parameter, even
// though `N` is only used as a phantom-like parameter of `AstId<N>`.
impl<N: AstNode> PartialEq for ItemLoc<N> {
    fn eq(&self, other: &Self) -> bool {
        self.module == other.module && self.ast_id == other.ast_id
    }
}
impl<N: AstNode> Eq for ItemLoc<N> {}
impl<N: AstNode> Hash for ItemLoc<N> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.module.hash(hasher);
        self.ast_id.hash(hasher);
    }
}
impl<N: AstNode> Clone for ItemLoc<N> {
    fn clone(&self) -> ItemLoc<N> {
        ItemLoc { module: self.module, ast_id: self.ast_id }
    }
}
/// Context bundle for turning an AST node into an interned ID: the database,
/// the containing module, and the file the node lives in.
#[derive(Clone, Copy)]
pub struct LocationCtx<DB> {
    db: DB,
    module: ModuleId,
    file_id: HirFileId,
}
impl<'a, DB> LocationCtx<&'a DB> {
    /// Creates a context for items in `module` within `file_id`.
    pub fn new(db: &'a DB, module: ModuleId, file_id: HirFileId) -> LocationCtx<&'a DB> {
        LocationCtx { db, module, file_id }
    }
}
impl<'a, DB: AstDatabase + InternDatabase> LocationCtx<&'a DB> {
    /// Interns `ast` in this context, producing its stable ID.
    /// Convenience wrapper over `AstItemDef::from_ast` with inverted receiver.
    pub fn to_def<N, DEF>(self, ast: &N) -> DEF
    where
        N: AstNode,
        DEF: AstItemDef<N>,
    {
        DEF::from_ast(self, ast)
    }
}
/// Common interface of item IDs interned from an `ItemLoc`: construction
/// from AST and resolution back to source / containing module.
pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
    /// Interns `loc`, returning the stable ID.
    fn intern(db: &impl InternDatabase, loc: ItemLoc<N>) -> Self;
    /// Inverse of `intern`: recovers the location this ID was created from.
    fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<N>;
    /// Looks up the stable AST ID of `ast` in the file's ID map, then interns
    /// its location.
    fn from_ast(ctx: LocationCtx<&(impl AstDatabase + InternDatabase)>, ast: &N) -> Self {
        let items = ctx.db.ast_id_map(ctx.file_id);
        let item_id = items.ast_id(ast);
        Self::from_ast_id(ctx, item_id)
    }
    /// Interns the location built from `ctx`'s module/file and `ast_id`.
    fn from_ast_id(ctx: LocationCtx<&impl InternDatabase>, ast_id: FileAstId<N>) -> Self {
        let loc = ItemLoc { module: ctx.module, ast_id: AstId::new(ctx.file_id, ast_id) };
        Self::intern(ctx.db, loc)
    }
    /// Resolves the ID back to its AST node, paired with the file it is in.
    fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> {
        let loc = self.lookup_intern(db);
        let ast = loc.ast_id.to_node(db);
        Source { file_id: loc.ast_id.file_id(), ast }
    }
    /// Returns the module this item is defined in.
    fn module(self, db: &impl InternDatabase) -> ModuleId {
        let loc = self.lookup_intern(db);
        loc.module
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct FunctionId(salsa::InternId);
impl_intern_key!(FunctionId);
impl AstItemDef<ast::FnDef> for FunctionId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::FnDef>) -> Self {
db.intern_function(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::FnDef> {
db.lookup_intern_function(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructId(salsa::InternId);
impl_intern_key!(StructId);
impl AstItemDef<ast::StructDef> for StructId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_struct(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_struct(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnionId(salsa::InternId);
impl_intern_key!(UnionId);
impl AstItemDef<ast::StructDef> for UnionId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StructDef>) -> Self {
db.intern_union(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StructDef> {
db.lookup_intern_union(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumId(salsa::InternId);
impl_intern_key!(EnumId);
impl AstItemDef<ast::EnumDef> for EnumId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::EnumDef>) -> Self {
db.intern_enum(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::EnumDef> {
db.lookup_intern_enum(self)
}
}
// FIXME: rename to `VariantId`, only enums can ave variants
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EnumVariantId {
parent: EnumId,
local_id: LocalEnumVariantId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct LocalEnumVariantId(RawId);
impl_arena_id!(LocalEnumVariantId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(salsa::InternId);
impl_intern_key!(ConstId);
impl AstItemDef<ast::ConstDef> for ConstId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::ConstDef>) -> Self {
db.intern_const(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::ConstDef> {
db.lookup_intern_const(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticId(salsa::InternId);
impl_intern_key!(StaticId);
impl AstItemDef<ast::StaticDef> for StaticId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::StaticDef>) -> Self {
db.intern_static(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::StaticDef> {
db.lookup_intern_static(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TraitId(salsa::InternId);
impl_intern_key!(TraitId);
impl AstItemDef<ast::TraitDef> for TraitId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::TraitDef>) -> Self {
db.intern_trait(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::TraitDef> {
db.lookup_intern_trait(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAliasId(salsa::InternId);
impl_intern_key!(TypeAliasId);
impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::TypeAliasDef>) -> Self {
db.intern_type_alias(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::TypeAliasDef> {
db.lookup_intern_type_alias(self)
}
}
/// Generates `From` impls for an enum with single-payload variants.
/// `impl_froms!(E: V)` produces `impl From<V> for E`; an optional nested
/// list `V(SV, ...)` additionally lifts each `SV` through the `V` variant,
/// giving a two-level `From<SV> for E`.
macro_rules! impl_froms {
    ($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
        $(
            impl From<$v> for $e {
                fn from(it: $v) -> $e {
                    $e::$v(it)
                }
            }
            $($(
                impl From<$sv> for $e {
                    fn from(it: $sv) -> $e {
                        $e::$v($v::$sv(it))
                    }
                }
            )*)?
        )*
    }
}
/// A data type: `struct`, `union`, or `enum` (the "ADT" items).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AdtId {
    StructId(StructId),
    UnionId(UnionId),
    EnumId(EnumId),
}
impl_froms!(AdtId: StructId, UnionId, EnumId);
/// The defs which can be visible in the module.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ModuleDefId {
    ModuleId(ModuleId),
    FunctionId(FunctionId),
    AdtId(AdtId),
    // Can't be directly declared, but can be imported.
    EnumVariantId(EnumVariantId),
    ConstId(ConstId),
    StaticId(StaticId),
    TraitId(TraitId),
    TypeAliasId(TypeAliasId),
    BuiltinType(BuiltinType),
}
// The nested `AdtId(...)` list lifts the ADT sub-IDs through `AdtId`, so
// e.g. a `StructId` converts straight into a `ModuleDefId`.
impl_froms!(
    ModuleDefId: ModuleId,
    FunctionId,
    AdtId(StructId, EnumId, UnionId),
    EnumVariantId,
    ConstId,
    StaticId,
    TraitId,
    TypeAliasId,
    BuiltinType
);

View file

@ -0,0 +1,5 @@
//! FIXME: write short doc here
// FIXME: review privacy of submodules
pub mod raw;
pub mod mod_resolution;

View file

@ -1,12 +1,13 @@
//! This module resolves `mod foo;` declaration to file. //! This module resolves `mod foo;` declaration to file.
use hir_expand::name::Name;
use ra_db::FileId; use ra_db::FileId;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use crate::{db::DefDatabase, HirFileId, Name}; use crate::{db::DefDatabase2, HirFileId};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub(super) struct ModDir { pub struct ModDir {
/// `.` for `mod.rs`, `lib.rs` /// `.` for `mod.rs`, `lib.rs`
/// `./foo` for `foo.rs` /// `./foo` for `foo.rs`
/// `./foo/bar` for `mod bar { mod x; }` nested in `foo.rs` /// `./foo/bar` for `mod bar { mod x; }` nested in `foo.rs`
@ -16,24 +17,17 @@ pub(super) struct ModDir {
} }
impl ModDir { impl ModDir {
pub(super) fn root() -> ModDir { pub fn root() -> ModDir {
ModDir { path: RelativePathBuf::default(), root_non_dir_owner: false } ModDir { path: RelativePathBuf::default(), root_non_dir_owner: false }
} }
pub(super) fn descend_into_definition( pub fn descend_into_definition(&self, name: &Name, attr_path: Option<&SmolStr>) -> ModDir {
&self,
name: &Name,
attr_path: Option<&SmolStr>,
) -> ModDir {
let mut path = self.path.clone(); let mut path = self.path.clone();
match attr_to_path(attr_path) { match attr_to_path(attr_path) {
None => path.push(&name.to_string()), None => path.push(&name.to_string()),
Some(attr_path) => { Some(attr_path) => {
if self.root_non_dir_owner { if self.root_non_dir_owner {
// Workaround for relative path API: turn `lib.rs` into ``. assert!(path.pop());
if !path.pop() {
path = RelativePathBuf::default();
}
} }
path.push(attr_path); path.push(attr_path);
} }
@ -41,24 +35,20 @@ impl ModDir {
ModDir { path, root_non_dir_owner: false } ModDir { path, root_non_dir_owner: false }
} }
pub(super) fn resolve_declaration( pub fn resolve_declaration(
&self, &self,
db: &impl DefDatabase, db: &impl DefDatabase2,
file_id: HirFileId, file_id: HirFileId,
name: &Name, name: &Name,
attr_path: Option<&SmolStr>, attr_path: Option<&SmolStr>,
) -> Result<(FileId, ModDir), RelativePathBuf> { ) -> Result<(FileId, ModDir), RelativePathBuf> {
let empty_path = RelativePathBuf::default();
let file_id = file_id.original_file(db); let file_id = file_id.original_file(db);
let mut candidate_files = Vec::new(); let mut candidate_files = Vec::new();
match attr_to_path(attr_path) { match attr_to_path(attr_path) {
Some(attr_path) => { Some(attr_path) => {
let base = if self.root_non_dir_owner { let base =
self.path.parent().unwrap_or(&empty_path) if self.root_non_dir_owner { self.path.parent().unwrap() } else { &self.path };
} else {
&self.path
};
candidate_files.push(base.join(attr_path)) candidate_files.push(base.join(attr_path))
} }
None => { None => {

View file

@ -2,18 +2,20 @@
use std::{ops::Index, sync::Arc}; use std::{ops::Index, sync::Arc};
use hir_expand::{
ast_id_map::AstIdMap,
db::AstDatabase,
either::Either,
hygiene::Hygiene,
name::{AsName, Name},
};
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{ use ra_syntax::{
ast::{self, AttrsOwner, NameOwner}, ast::{self, AttrsOwner, NameOwner},
AstNode, AstPtr, SourceFile, AstNode, AstPtr, SourceFile,
}; };
use test_utils::tested_by;
use crate::{ use crate::{attr::Attr, db::DefDatabase2, path::Path, FileAstId, HirFileId, ModuleSource, Source};
attr::Attr,
db::{AstDatabase, DefDatabase},
AsName, AstIdMap, Either, FileAstId, HirFileId, ModuleSource, Name, Path, Source,
};
/// `RawItems` is a set of top-level items in a file (except for impls). /// `RawItems` is a set of top-level items in a file (except for impls).
/// ///
@ -37,10 +39,8 @@ pub struct ImportSourceMap {
type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>; type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>; type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>;
impl ImportSourcePtr { fn to_node(ptr: ImportSourcePtr, file: &SourceFile) -> ImportSource {
fn to_node(self, file: &SourceFile) -> ImportSource { ptr.map(|ptr| ptr.to_node(file.syntax()), |ptr| ptr.to_node(file.syntax()))
self.map(|ptr| ptr.to_node(file.syntax()), |ptr| ptr.to_node(file.syntax()))
}
} }
impl ImportSourceMap { impl ImportSourceMap {
@ -48,26 +48,26 @@ impl ImportSourceMap {
self.map.insert(import, ptr) self.map.insert(import, ptr)
} }
pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { pub fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource {
let file = match source { let file = match source {
ModuleSource::SourceFile(file) => file.clone(), ModuleSource::SourceFile(file) => file.clone(),
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
}; };
self.map[import].to_node(&file) to_node(self.map[import], &file)
} }
} }
impl RawItems { impl RawItems {
pub(crate) fn raw_items_query( pub(crate) fn raw_items_query(
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase2 + AstDatabase),
file_id: HirFileId, file_id: HirFileId,
) -> Arc<RawItems> { ) -> Arc<RawItems> {
db.raw_items_with_source_map(file_id).0 db.raw_items_with_source_map(file_id).0
} }
pub(crate) fn raw_items_with_source_map_query( pub(crate) fn raw_items_with_source_map_query(
db: &(impl DefDatabase + AstDatabase), db: &(impl DefDatabase2 + AstDatabase),
file_id: HirFileId, file_id: HirFileId,
) -> (Arc<RawItems>, Arc<ImportSourceMap>) { ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
let mut collector = RawItemsCollector { let mut collector = RawItemsCollector {
@ -75,7 +75,7 @@ impl RawItems {
source_ast_id_map: db.ast_id_map(file_id), source_ast_id_map: db.ast_id_map(file_id),
source_map: ImportSourceMap::default(), source_map: ImportSourceMap::default(),
file_id, file_id,
db, hygiene: Hygiene::new(db, file_id),
}; };
if let Some(node) = db.parse_or_expand(file_id) { if let Some(node) = db.parse_or_expand(file_id) {
if let Some(source_file) = ast::SourceFile::cast(node.clone()) { if let Some(source_file) = ast::SourceFile::cast(node.clone()) {
@ -87,7 +87,7 @@ impl RawItems {
(Arc::new(collector.raw_items), Arc::new(collector.source_map)) (Arc::new(collector.raw_items), Arc::new(collector.source_map))
} }
pub(super) fn items(&self) -> &[RawItem] { pub fn items(&self) -> &[RawItem] {
&self.items &self.items
} }
} }
@ -124,19 +124,19 @@ impl Index<Macro> for RawItems {
type Attrs = Option<Arc<[Attr]>>; type Attrs = Option<Arc<[Attr]>>;
#[derive(Debug, PartialEq, Eq, Clone)] #[derive(Debug, PartialEq, Eq, Clone)]
pub(super) struct RawItem { pub struct RawItem {
attrs: Attrs, attrs: Attrs,
pub(super) kind: RawItemKind, pub kind: RawItemKind,
} }
impl RawItem { impl RawItem {
pub(super) fn attrs(&self) -> &[Attr] { pub fn attrs(&self) -> &[Attr] {
self.attrs.as_ref().map_or(&[], |it| &*it) self.attrs.as_ref().map_or(&[], |it| &*it)
} }
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum RawItemKind { pub enum RawItemKind {
Module(Module), Module(Module),
Import(ImportId), Import(ImportId),
Def(Def), Def(Def),
@ -144,11 +144,11 @@ pub(super) enum RawItemKind {
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct Module(RawId); pub struct Module(RawId);
impl_arena_id!(Module); impl_arena_id!(Module);
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(super) enum ModuleData { pub enum ModuleData {
Declaration { name: Name, ast_id: FileAstId<ast::Module> }, Declaration { name: Name, ast_id: FileAstId<ast::Module> },
Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> }, Definition { name: Name, ast_id: FileAstId<ast::Module>, items: Vec<RawItem> },
} }
@ -159,26 +159,26 @@ impl_arena_id!(ImportId);
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData { pub struct ImportData {
pub(super) path: Path, pub path: Path,
pub(super) alias: Option<Name>, pub alias: Option<Name>,
pub(super) is_glob: bool, pub is_glob: bool,
pub(super) is_prelude: bool, pub is_prelude: bool,
pub(super) is_extern_crate: bool, pub is_extern_crate: bool,
pub(super) is_macro_use: bool, pub is_macro_use: bool,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct Def(RawId); pub struct Def(RawId);
impl_arena_id!(Def); impl_arena_id!(Def);
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(super) struct DefData { pub struct DefData {
pub(super) name: Name, pub name: Name,
pub(super) kind: DefKind, pub kind: DefKind,
} }
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum DefKind { pub enum DefKind {
Function(FileAstId<ast::FnDef>), Function(FileAstId<ast::FnDef>),
Struct(FileAstId<ast::StructDef>), Struct(FileAstId<ast::StructDef>),
Union(FileAstId<ast::StructDef>), Union(FileAstId<ast::StructDef>),
@ -190,26 +190,26 @@ pub(super) enum DefKind {
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(super) struct Macro(RawId); pub struct Macro(RawId);
impl_arena_id!(Macro); impl_arena_id!(Macro);
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub(super) struct MacroData { pub struct MacroData {
pub(super) ast_id: FileAstId<ast::MacroCall>, pub ast_id: FileAstId<ast::MacroCall>,
pub(super) path: Path, pub path: Path,
pub(super) name: Option<Name>, pub name: Option<Name>,
pub(super) export: bool, pub export: bool,
} }
struct RawItemsCollector<DB> { struct RawItemsCollector {
raw_items: RawItems, raw_items: RawItems,
source_ast_id_map: Arc<AstIdMap>, source_ast_id_map: Arc<AstIdMap>,
source_map: ImportSourceMap, source_map: ImportSourceMap,
file_id: HirFileId, file_id: HirFileId,
db: DB, hygiene: Hygiene,
} }
impl<DB: AstDatabase> RawItemsCollector<&DB> { impl RawItemsCollector {
fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) { fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) {
for item_or_macro in body.items_with_macros() { for item_or_macro in body.items_with_macros() {
match item_or_macro { match item_or_macro {
@ -297,7 +297,8 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
self.push_item(current_module, attrs, RawItemKind::Module(item)); self.push_item(current_module, attrs, RawItemKind::Module(item));
return; return;
} }
tested_by!(name_res_works_for_broken_modules); // FIXME: restore this mark once we complete hir splitting
// tested_by!(name_res_works_for_broken_modules);
} }
fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) { fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) {
@ -305,9 +306,10 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
let is_prelude = use_item.has_atom_attr("prelude_import"); let is_prelude = use_item.has_atom_attr("prelude_import");
let attrs = self.parse_attrs(&use_item); let attrs = self.parse_attrs(&use_item);
let mut buf = Vec::new();
Path::expand_use_item( Path::expand_use_item(
Source { ast: use_item, file_id: self.file_id }, Source { ast: use_item, file_id: self.file_id },
self.db, &self.hygiene,
|path, use_tree, is_glob, alias| { |path, use_tree, is_glob, alias| {
let import_data = ImportData { let import_data = ImportData {
path, path,
@ -317,14 +319,12 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
is_extern_crate: false, is_extern_crate: false,
is_macro_use: false, is_macro_use: false,
}; };
self.push_import( buf.push((import_data, Either::A(AstPtr::new(use_tree))));
current_module,
attrs.clone(),
import_data,
Either::A(AstPtr::new(use_tree)),
);
}, },
) );
for (import_data, ptr) in buf {
self.push_import(current_module, attrs.clone(), import_data, ptr);
}
} }
fn add_extern_crate_item( fn add_extern_crate_item(
@ -357,10 +357,7 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) { fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) {
let attrs = self.parse_attrs(&m); let attrs = self.parse_attrs(&m);
let path = match m let path = match m.path().and_then(|path| Path::from_src(path, &self.hygiene)) {
.path()
.and_then(|path| Path::from_src(Source { ast: path, file_id: self.file_id }, self.db))
{
Some(it) => it, Some(it) => it,
_ => return, _ => return,
}; };
@ -398,6 +395,6 @@ impl<DB: AstDatabase> RawItemsCollector<&DB> {
} }
fn parse_attrs(&self, item: &impl ast::AttrsOwner) -> Attrs { fn parse_attrs(&self, item: &impl ast::AttrsOwner) -> Attrs {
Attr::from_attrs_owner(self.file_id, item, self.db) Attr::from_attrs_owner(item, &self.hygiene)
} }
} }

View file

@ -2,12 +2,18 @@
use std::{iter, sync::Arc}; use std::{iter, sync::Arc};
use hir_expand::{
either::Either,
hygiene::Hygiene,
name::{self, AsName, Name},
};
use ra_db::CrateId;
use ra_syntax::{ use ra_syntax::{
ast::{self, NameOwner, TypeAscriptionOwner}, ast::{self, NameOwner, TypeAscriptionOwner},
AstNode, AstNode,
}; };
use crate::{db::AstDatabase, name, type_ref::TypeRef, AsName, Crate, Name, Source}; use crate::{type_ref::TypeRef, Source};
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Path { pub struct Path {
@ -55,18 +61,18 @@ pub enum PathKind {
// Type based path like `<T>::foo` // Type based path like `<T>::foo`
Type(Box<TypeRef>), Type(Box<TypeRef>),
// `$crate` from macro expansion // `$crate` from macro expansion
DollarCrate(Crate), DollarCrate(CrateId),
} }
impl Path { impl Path {
/// Calls `cb` with all paths, represented by this use item. /// Calls `cb` with all paths, represented by this use item.
pub fn expand_use_item( pub fn expand_use_item(
item_src: Source<ast::UseItem>, item_src: Source<ast::UseItem>,
db: &impl AstDatabase, hygiene: &Hygiene,
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>), mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
) { ) {
if let Some(tree) = item_src.ast.use_tree() { if let Some(tree) = item_src.ast.use_tree() {
expand_use_tree(None, tree, &|| item_src.file_id.macro_crate(db), &mut cb); expand_use_tree(None, tree, hygiene, &mut cb);
} }
} }
@ -83,17 +89,12 @@ impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// DEPRECATED: It does not handle `$crate` from macro call. /// DEPRECATED: It does not handle `$crate` from macro call.
pub fn from_ast(path: ast::Path) -> Option<Path> { pub fn from_ast(path: ast::Path) -> Option<Path> {
Path::parse(path, &|| None) Path::from_src(path, &Hygiene::new_unhygienic())
} }
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
pub fn from_src(source: Source<ast::Path>, db: &impl AstDatabase) -> Option<Path> { pub fn from_src(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
let file_id = source.file_id;
Path::parse(source.ast, &|| file_id.macro_crate(db))
}
fn parse(mut path: ast::Path, macro_crate: &impl Fn() -> Option<Crate>) -> Option<Path> {
let mut kind = PathKind::Plain; let mut kind = PathKind::Plain;
let mut segments = Vec::new(); let mut segments = Vec::new();
loop { loop {
@ -104,26 +105,28 @@ impl Path {
} }
match segment.kind()? { match segment.kind()? {
ast::PathSegmentKind::Name(name) => { ast::PathSegmentKind::Name(name_ref) => {
if name.text() == "$crate" { // FIXME: this should just return name
if let Some(macro_crate) = macro_crate() { match hygiene.name_ref_to_name(name_ref) {
kind = PathKind::DollarCrate(macro_crate); Either::A(name) => {
let args = segment
.type_arg_list()
.and_then(GenericArgs::from_ast)
.or_else(|| {
GenericArgs::from_fn_like_path_ast(
segment.param_list(),
segment.ret_type(),
)
})
.map(Arc::new);
let segment = PathSegment { name, args_and_bindings: args };
segments.push(segment);
}
Either::B(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break; break;
} }
} }
let args = segment
.type_arg_list()
.and_then(GenericArgs::from_ast)
.or_else(|| {
GenericArgs::from_fn_like_path_ast(
segment.param_list(),
segment.ret_type(),
)
})
.map(Arc::new);
let segment = PathSegment { name: name.as_name(), args_and_bindings: args };
segments.push(segment);
} }
ast::PathSegmentKind::Type { type_ref, trait_ref } => { ast::PathSegmentKind::Type { type_ref, trait_ref } => {
assert!(path.qualifier().is_none()); // this can only occur at the first segment assert!(path.qualifier().is_none()); // this can only occur at the first segment
@ -137,7 +140,7 @@ impl Path {
} }
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => { Some(trait_ref) => {
let path = Path::parse(trait_ref.path()?, macro_crate)?; let path = Path::from_src(trait_ref.path()?, hygiene)?;
kind = path.kind; kind = path.kind;
let mut prefix_segments = path.segments; let mut prefix_segments = path.segments;
prefix_segments.reverse(); prefix_segments.reverse();
@ -224,7 +227,7 @@ impl Path {
} }
impl GenericArgs { impl GenericArgs {
pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> { pub fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
let mut args = Vec::new(); let mut args = Vec::new();
for type_arg in node.type_args() { for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
@ -288,8 +291,8 @@ impl From<Name> for Path {
fn expand_use_tree( fn expand_use_tree(
prefix: Option<Path>, prefix: Option<Path>,
tree: ast::UseTree, tree: ast::UseTree,
macro_crate: &impl Fn() -> Option<Crate>, hygiene: &Hygiene,
cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>), cb: &mut dyn FnMut(Path, &ast::UseTree, bool, Option<Name>),
) { ) {
if let Some(use_tree_list) = tree.use_tree_list() { if let Some(use_tree_list) = tree.use_tree_list() {
let prefix = match tree.path() { let prefix = match tree.path() {
@ -297,13 +300,13 @@ fn expand_use_tree(
None => prefix, None => prefix,
// E.g. `use something::{inner}` (prefix is `None`, path is `something`) // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => match convert_path(prefix, path, macro_crate) { Some(path) => match convert_path(prefix, path, hygiene) {
Some(it) => Some(it), Some(it) => Some(it),
None => return, // FIXME: report errors somewhere None => return, // FIXME: report errors somewhere
}, },
}; };
for child_tree in use_tree_list.use_trees() { for child_tree in use_tree_list.use_trees() {
expand_use_tree(prefix.clone(), child_tree, macro_crate, cb); expand_use_tree(prefix.clone(), child_tree, hygiene, cb);
} }
} else { } else {
let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name()); let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name());
@ -320,7 +323,7 @@ fn expand_use_tree(
} }
} }
} }
if let Some(path) = convert_path(prefix, ast_path, macro_crate) { if let Some(path) = convert_path(prefix, ast_path, hygiene) {
let is_glob = tree.has_star(); let is_glob = tree.has_star();
cb(path, &tree, is_glob, alias) cb(path, &tree, is_glob, alias)
} }
@ -330,37 +333,36 @@ fn expand_use_tree(
} }
} }
fn convert_path( fn convert_path(prefix: Option<Path>, path: ast::Path, hygiene: &Hygiene) -> Option<Path> {
prefix: Option<Path>,
path: ast::Path,
macro_crate: &impl Fn() -> Option<Crate>,
) -> Option<Path> {
let prefix = if let Some(qual) = path.qualifier() { let prefix = if let Some(qual) = path.qualifier() {
Some(convert_path(prefix, qual, macro_crate)?) Some(convert_path(prefix, qual, hygiene)?)
} else { } else {
prefix prefix
}; };
let segment = path.segment()?; let segment = path.segment()?;
let res = match segment.kind()? { let res = match segment.kind()? {
ast::PathSegmentKind::Name(name) => { ast::PathSegmentKind::Name(name_ref) => {
if name.text() == "$crate" { match hygiene.name_ref_to_name(name_ref) {
if let Some(krate) = macro_crate() { Either::A(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| Path {
kind: PathKind::Plain,
segments: Vec::with_capacity(1),
});
res.segments.push(PathSegment {
name,
args_and_bindings: None, // no type args in use
});
res
}
Either::B(crate_id) => {
return Some(Path::from_simple_segments( return Some(Path::from_simple_segments(
PathKind::DollarCrate(krate), PathKind::DollarCrate(crate_id),
iter::empty(), iter::empty(),
)); ))
} }
} }
// no type args in use
let mut res = prefix
.unwrap_or_else(|| Path { kind: PathKind::Plain, segments: Vec::with_capacity(1) });
res.segments.push(PathSegment {
name: name.as_name(),
args_and_bindings: None, // no type args in use
});
res
} }
ast::PathSegmentKind::CrateKw => { ast::PathSegmentKind::CrateKw => {
if prefix.is_some() { if prefix.is_some() {
@ -389,8 +391,9 @@ fn convert_path(
} }
pub mod known { pub mod known {
use hir_expand::name;
use super::{Path, PathKind}; use super::{Path, PathKind};
use crate::name;
pub fn std_iter_into_iterator() -> Path { pub fn std_iter_into_iterator() -> Path {
Path::from_simple_segments( Path::from_simple_segments(

View file

@ -3,7 +3,7 @@
use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner}; use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner};
use crate::Path; use crate::path::Path;
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Mutability { pub enum Mutability {
@ -64,7 +64,7 @@ pub enum TypeBound {
impl TypeRef { impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`. /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub(crate) fn from_ast(node: ast::TypeRef) -> Self { pub fn from_ast(node: ast::TypeRef) -> Self {
match node { match node {
ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
ast::TypeRef::TupleType(inner) => { ast::TypeRef::TupleType(inner) => {
@ -113,7 +113,7 @@ impl TypeRef {
} }
} }
pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { pub fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
if let Some(node) = node { if let Some(node) = node {
TypeRef::from_ast(node) TypeRef::from_ast(node)
} else { } else {
@ -135,7 +135,7 @@ pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>)
} }
impl TypeBound { impl TypeBound {
pub(crate) fn from_ast(node: ast::TypeBound) -> Self { pub fn from_ast(node: ast::TypeBound) -> Self {
match node.kind() { match node.kind() {
ast::TypeBoundKind::PathType(path_type) => { ast::TypeBoundKind::PathType(path_type) => {
let path = match path_type.path() { let path = match path_type.path() {

View file

@ -0,0 +1,15 @@
[package]
edition = "2018"
name = "ra_hir_expand"
version = "0.1.0"
authors = ["rust-analyzer developers"]
[dependencies]
log = "0.4.5"
ra_arena = { path = "../ra_arena" }
ra_db = { path = "../ra_db" }
ra_syntax = { path = "../ra_syntax" }
ra_prof = { path = "../ra_prof" }
tt = { path = "../ra_tt", package = "ra_tt" }
mbe = { path = "../ra_mbe", package = "ra_mbe" }

View file

@ -1,58 +1,21 @@
//! FIXME: write short doc here //! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items
//! and macro calls.
//!
//! Specifically, it enumerates all items in a file and uses position of a an
//! item as an ID. That way, id's don't change unless the set of items itself
//! changes.
use std::{ use std::{
hash::{Hash, Hasher}, hash::{Hash, Hasher},
marker::PhantomData, marker::PhantomData,
sync::Arc,
}; };
use ra_arena::{impl_arena_id, Arena, RawId}; use ra_arena::{impl_arena_id, Arena, RawId};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::{db::AstDatabase, HirFileId};
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
#[derive(Debug)]
pub(crate) struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> Clone for AstId<N> {
fn clone(&self) -> AstId<N> {
*self
}
}
impl<N: AstNode> Copy for AstId<N> {}
impl<N: AstNode> PartialEq for AstId<N> {
fn eq(&self, other: &Self) -> bool {
(self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id)
}
}
impl<N: AstNode> Eq for AstId<N> {}
impl<N: AstNode> Hash for AstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
(self.file_id, self.file_ast_id).hash(hasher);
}
}
impl<N: AstNode> AstId<N> {
pub(crate) fn file_id(&self) -> HirFileId {
self.file_id
}
pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N {
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
N::cast(syntax_node).unwrap()
}
}
/// `AstId` points to an AST node in a specific file. /// `AstId` points to an AST node in a specific file.
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct FileAstId<N: AstNode> { pub struct FileAstId<N: AstNode> {
raw: ErasedFileAstId, raw: ErasedFileAstId,
_ty: PhantomData<fn() -> N>, _ty: PhantomData<fn() -> N>,
} }
@ -76,14 +39,8 @@ impl<N: AstNode> Hash for FileAstId<N> {
} }
} }
impl<N: AstNode> FileAstId<N> {
pub(crate) fn with_file_id(self, file_id: HirFileId) -> AstId<N> {
AstId { file_id, file_ast_id: self }
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ErasedFileAstId(RawId); struct ErasedFileAstId(RawId);
impl_arena_id!(ErasedFileAstId); impl_arena_id!(ErasedFileAstId);
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
@ -93,39 +50,7 @@ pub struct AstIdMap {
} }
impl AstIdMap { impl AstIdMap {
pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
let map = if let Some(node) = db.parse_or_expand(file_id) {
AstIdMap::from_source(&node)
} else {
AstIdMap::default()
};
Arc::new(map)
}
pub(crate) fn file_item_query(
db: &impl AstDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> SyntaxNode {
let node = db.parse_or_expand(file_id).unwrap();
db.ast_id_map(file_id).arena[ast_id].to_node(&node)
}
pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let ptr = SyntaxNodePtr::new(item.syntax());
let raw = match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item.syntax(),
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
};
FileAstId { raw, _ty: PhantomData }
}
fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none()); assert!(node.parent().is_none());
let mut res = AstIdMap { arena: Arena::default() }; let mut res = AstIdMap { arena: Arena::default() };
// By walking the tree in bread-first order we make sure that parents // By walking the tree in bread-first order we make sure that parents
@ -142,6 +67,26 @@ impl AstIdMap {
res res
} }
pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, _ty: PhantomData }
}
fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
let ptr = SyntaxNodePtr::new(item);
match self.arena.iter().find(|(_id, i)| **i == ptr) {
Some((it, _)) => it,
None => panic!(
"Can't find {:?} in AstIdMap:\n{:?}",
item,
self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
),
}
}
pub(crate) fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
self.arena[id.raw].cast::<N>().unwrap()
}
fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
self.arena.alloc(SyntaxNodePtr::new(item)) self.arena.alloc(SyntaxNodePtr::new(item))
} }

View file

@ -0,0 +1,104 @@
//! Defines database & queries for macro expansion.
use std::sync::Arc;
use mbe::MacroRules;
use ra_db::{salsa, SourceDatabase};
use ra_prof::profile;
use ra_syntax::{AstNode, Parse, SyntaxNode};
use crate::{
ast_id_map::AstIdMap, HirFileId, HirFileIdRepr, MacroCallId, MacroCallLoc, MacroDefId,
MacroFile, MacroFileKind,
};
// FIXME: rename to ExpandDatabase
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
#[salsa::transparent]
fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
#[salsa::interned]
fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<tt::Subtree>>;
fn macro_def(&self, id: MacroDefId) -> Option<Arc<mbe::MacroRules>>;
fn parse_macro(&self, macro_file: MacroFile) -> Option<Parse<SyntaxNode>>;
fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
}
pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map =
db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
Arc::new(map)
}
pub(crate) fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
let macro_call = id.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
let rules = MacroRules::parse(&tt).ok().or_else(|| {
log::warn!("fail on macro_def parse: {:#?}", tt);
None
})?;
Some(Arc::new(rules))
}
pub(crate) fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro(id);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new(tt))
}
pub(crate) fn macro_expand(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Result<Arc<tt::Subtree>, String> {
let loc = db.lookup_intern_macro(id);
let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?;
let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
// Set a hard limit for the expanded tt
let count = tt.count();
if count > 65536 {
return Err(format!("Total tokens count exceed limit : count = {}", count));
}
Ok(Arc::new(tt))
}
pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
match file_id.0 {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro(macro_file).map(|it| it.syntax_node())
}
}
}
pub(crate) fn parse_macro(
db: &dyn AstDatabase,
macro_file: MacroFile,
) -> Option<Parse<SyntaxNode>> {
let _p = profile("parse_macro_query");
let macro_call_id = macro_file.macro_call_id;
let tt = db
.macro_expand(macro_call_id)
.map_err(|err| {
// Note:
// The final goal we would like to make all parse_macro success,
// such that the following log will not call anyway.
log::warn!("fail on macro_parse: (reason: {})", err,);
})
.ok()?;
match macro_file.macro_file_kind {
MacroFileKind::Items => mbe::token_tree_to_items(&tt).ok().map(Parse::to_syntax),
MacroFileKind::Expr => mbe::token_tree_to_expr(&tt).ok().map(Parse::to_syntax),
}
}

View file

@ -0,0 +1,46 @@
//! This modules handles hygiene information.
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use ra_db::CrateId;
use ra_syntax::ast;
use crate::{
db::AstDatabase,
either::Either,
name::{AsName, Name},
HirFileId, HirFileIdRepr,
};
#[derive(Debug)]
pub struct Hygiene {
// This is what `$crate` expands to
def_crate: Option<CrateId>,
}
impl Hygiene {
pub fn new(db: &impl AstDatabase, file_id: HirFileId) -> Hygiene {
let def_crate = match file_id.0 {
HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => {
let loc = db.lookup_intern_macro(macro_file.macro_call_id);
Some(loc.def.krate)
}
};
Hygiene { def_crate }
}
pub fn new_unhygienic() -> Hygiene {
Hygiene { def_crate: None }
}
// FIXME: this should just return name
pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> {
if let Some(def_crate) = self.def_crate {
if name_ref.text() == "$crate" {
return Either::B(def_crate);
}
}
Either::A(name_ref.as_name())
}
}

View file

@ -0,0 +1,153 @@
//! `ra_hir_expand` deals with macro expansion.
//!
//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
//! tree originates not from the text of some `FileId`, but from some macro
//! expansion.
pub mod db;
pub mod ast_id_map;
pub mod either;
pub mod name;
pub mod hygiene;
use std::hash::{Hash, Hasher};
use ra_db::{salsa, CrateId, FileId};
use ra_syntax::ast::{self, AstNode};
use crate::ast_id_map::FileAstId;
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interner).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(HirFileIdRepr);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFile),
}
impl From<FileId> for HirFileId {
fn from(id: FileId) -> Self {
HirFileId(HirFileIdRepr::FileId(id))
}
}
impl From<MacroFile> for HirFileId {
fn from(id: MacroFile) -> Self {
HirFileId(HirFileIdRepr::MacroFile(id))
}
}
impl HirFileId {
/// For macro-expansion files, returns the file original source file the
/// expansion originated from.
pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
match self.0 {
HirFileIdRepr::FileId(file_id) => file_id,
HirFileIdRepr::MacroFile(macro_file) => {
let loc = db.lookup_intern_macro(macro_file.macro_call_id);
loc.ast_id.file_id().original_file(db)
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
macro_call_id: MacroCallId,
macro_file_kind: MacroFileKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroFileKind {
Items,
Expr,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroCallId(salsa::InternId);
impl salsa::InternKey for MacroCallId {
fn from_intern_id(v: salsa::InternId) -> Self {
MacroCallId(v)
}
fn as_intern_id(&self) -> salsa::InternId {
self.0
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {
pub krate: CrateId,
pub ast_id: AstId<ast::MacroCall>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
pub def: MacroDefId,
pub ast_id: AstId<ast::MacroCall>,
}
impl MacroCallId {
pub fn as_file(self, kind: MacroFileKind) -> HirFileId {
let macro_file = MacroFile { macro_call_id: self, macro_file_kind: kind };
macro_file.into()
}
}
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
// FIXME: isn't this just a `Source<FileAstId<N>>` ?
#[derive(Debug)]
pub struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> Clone for AstId<N> {
fn clone(&self) -> AstId<N> {
*self
}
}
impl<N: AstNode> Copy for AstId<N> {}
impl<N: AstNode> PartialEq for AstId<N> {
fn eq(&self, other: &Self) -> bool {
(self.file_id, self.file_ast_id) == (other.file_id, other.file_ast_id)
}
}
impl<N: AstNode> Eq for AstId<N> {}
impl<N: AstNode> Hash for AstId<N> {
fn hash<H: Hasher>(&self, hasher: &mut H) {
(self.file_id, self.file_ast_id).hash(hasher);
}
}
impl<N: AstNode> AstId<N> {
pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
AstId { file_id, file_ast_id }
}
pub fn file_id(&self) -> HirFileId {
self.file_id
}
pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
let root = db.parse_or_expand(self.file_id).unwrap();
db.ast_id_map(self.file_id).get(self.file_ast_id).to_node(&root)
}
}

View file

@ -33,7 +33,7 @@ impl Name {
Name(Repr::Text(text)) Name(Repr::Text(text))
} }
pub(crate) fn new_tuple_field(idx: usize) -> Name { pub fn new_tuple_field(idx: usize) -> Name {
Name(Repr::TupleField(idx)) Name(Repr::TupleField(idx))
} }
@ -52,11 +52,11 @@ impl Name {
} }
} }
pub(crate) fn missing() -> Name { pub fn missing() -> Name {
Name::new_text("[missing name]".into()) Name::new_text("[missing name]".into())
} }
pub(crate) fn as_tuple_index(&self) -> Option<usize> { pub fn as_tuple_index(&self) -> Option<usize> {
match self.0 { match self.0 {
Repr::TupleField(idx) => Some(idx), Repr::TupleField(idx) => Some(idx),
_ => None, _ => None,
@ -64,7 +64,7 @@ impl Name {
} }
} }
pub(crate) trait AsName { pub trait AsName {
fn as_name(&self) -> Name; fn as_name(&self) -> Name;
} }
@ -99,44 +99,44 @@ impl AsName for ra_db::Dependency {
} }
// Primitives // Primitives
pub(crate) const ISIZE: Name = Name::new_inline_ascii(5, b"isize"); pub const ISIZE: Name = Name::new_inline_ascii(5, b"isize");
pub(crate) const I8: Name = Name::new_inline_ascii(2, b"i8"); pub const I8: Name = Name::new_inline_ascii(2, b"i8");
pub(crate) const I16: Name = Name::new_inline_ascii(3, b"i16"); pub const I16: Name = Name::new_inline_ascii(3, b"i16");
pub(crate) const I32: Name = Name::new_inline_ascii(3, b"i32"); pub const I32: Name = Name::new_inline_ascii(3, b"i32");
pub(crate) const I64: Name = Name::new_inline_ascii(3, b"i64"); pub const I64: Name = Name::new_inline_ascii(3, b"i64");
pub(crate) const I128: Name = Name::new_inline_ascii(4, b"i128"); pub const I128: Name = Name::new_inline_ascii(4, b"i128");
pub(crate) const USIZE: Name = Name::new_inline_ascii(5, b"usize"); pub const USIZE: Name = Name::new_inline_ascii(5, b"usize");
pub(crate) const U8: Name = Name::new_inline_ascii(2, b"u8"); pub const U8: Name = Name::new_inline_ascii(2, b"u8");
pub(crate) const U16: Name = Name::new_inline_ascii(3, b"u16"); pub const U16: Name = Name::new_inline_ascii(3, b"u16");
pub(crate) const U32: Name = Name::new_inline_ascii(3, b"u32"); pub const U32: Name = Name::new_inline_ascii(3, b"u32");
pub(crate) const U64: Name = Name::new_inline_ascii(3, b"u64"); pub const U64: Name = Name::new_inline_ascii(3, b"u64");
pub(crate) const U128: Name = Name::new_inline_ascii(4, b"u128"); pub const U128: Name = Name::new_inline_ascii(4, b"u128");
pub(crate) const F32: Name = Name::new_inline_ascii(3, b"f32"); pub const F32: Name = Name::new_inline_ascii(3, b"f32");
pub(crate) const F64: Name = Name::new_inline_ascii(3, b"f64"); pub const F64: Name = Name::new_inline_ascii(3, b"f64");
pub(crate) const BOOL: Name = Name::new_inline_ascii(4, b"bool"); pub const BOOL: Name = Name::new_inline_ascii(4, b"bool");
pub(crate) const CHAR: Name = Name::new_inline_ascii(4, b"char"); pub const CHAR: Name = Name::new_inline_ascii(4, b"char");
pub(crate) const STR: Name = Name::new_inline_ascii(3, b"str"); pub const STR: Name = Name::new_inline_ascii(3, b"str");
// Special names // Special names
pub(crate) const SELF_PARAM: Name = Name::new_inline_ascii(4, b"self"); pub const SELF_PARAM: Name = Name::new_inline_ascii(4, b"self");
pub(crate) const SELF_TYPE: Name = Name::new_inline_ascii(4, b"Self"); pub const SELF_TYPE: Name = Name::new_inline_ascii(4, b"Self");
pub(crate) const MACRO_RULES: Name = Name::new_inline_ascii(11, b"macro_rules"); pub const MACRO_RULES: Name = Name::new_inline_ascii(11, b"macro_rules");
// Components of known path (value or mod name) // Components of known path (value or mod name)
pub(crate) const STD: Name = Name::new_inline_ascii(3, b"std"); pub const STD: Name = Name::new_inline_ascii(3, b"std");
pub(crate) const ITER: Name = Name::new_inline_ascii(4, b"iter"); pub const ITER: Name = Name::new_inline_ascii(4, b"iter");
pub(crate) const OPS: Name = Name::new_inline_ascii(3, b"ops"); pub const OPS: Name = Name::new_inline_ascii(3, b"ops");
pub(crate) const FUTURE: Name = Name::new_inline_ascii(6, b"future"); pub const FUTURE: Name = Name::new_inline_ascii(6, b"future");
pub(crate) const RESULT: Name = Name::new_inline_ascii(6, b"result"); pub const RESULT: Name = Name::new_inline_ascii(6, b"result");
pub(crate) const BOXED: Name = Name::new_inline_ascii(5, b"boxed"); pub const BOXED: Name = Name::new_inline_ascii(5, b"boxed");
// Components of known path (type name) // Components of known path (type name)
pub(crate) const INTO_ITERATOR_TYPE: Name = Name::new_inline_ascii(12, b"IntoIterator"); pub const INTO_ITERATOR_TYPE: Name = Name::new_inline_ascii(12, b"IntoIterator");
pub(crate) const ITEM_TYPE: Name = Name::new_inline_ascii(4, b"Item"); pub const ITEM_TYPE: Name = Name::new_inline_ascii(4, b"Item");
pub(crate) const TRY_TYPE: Name = Name::new_inline_ascii(3, b"Try"); pub const TRY_TYPE: Name = Name::new_inline_ascii(3, b"Try");
pub(crate) const OK_TYPE: Name = Name::new_inline_ascii(2, b"Ok"); pub const OK_TYPE: Name = Name::new_inline_ascii(2, b"Ok");
pub(crate) const FUTURE_TYPE: Name = Name::new_inline_ascii(6, b"Future"); pub const FUTURE_TYPE: Name = Name::new_inline_ascii(6, b"Future");
pub(crate) const RESULT_TYPE: Name = Name::new_inline_ascii(6, b"Result"); pub const RESULT_TYPE: Name = Name::new_inline_ascii(6, b"Result");
pub(crate) const OUTPUT_TYPE: Name = Name::new_inline_ascii(6, b"Output"); pub const OUTPUT_TYPE: Name = Name::new_inline_ascii(6, b"Output");
pub(crate) const TARGET_TYPE: Name = Name::new_inline_ascii(6, b"Target"); pub const TARGET_TYPE: Name = Name::new_inline_ascii(6, b"Target");
pub(crate) const BOX_TYPE: Name = Name::new_inline_ascii(3, b"Box"); pub const BOX_TYPE: Name = Name::new_inline_ascii(3, b"Box");

View file

@ -27,10 +27,13 @@ ra_db = { path = "../ra_db" }
ra_cfg = { path = "../ra_cfg" } ra_cfg = { path = "../ra_cfg" }
ra_fmt = { path = "../ra_fmt" } ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" } ra_prof = { path = "../ra_prof" }
hir = { path = "../ra_hir", package = "ra_hir" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }
ra_assists = { path = "../ra_assists" } ra_assists = { path = "../ra_assists" }
# ra_ide_api should depend only on the top-level `hir` package. if you need
# something from some `hir_xxx` subpackage, reexport the API via `hir`.
hir = { path = "../ra_hir", package = "ra_hir" }
[dev-dependencies] [dev-dependencies]
insta = "0.12.0" insta = "0.12.0"

View file

@ -2,9 +2,9 @@
use ra_db::SourceDatabase; use ra_db::SourceDatabase;
use ra_syntax::{ use ra_syntax::{
algo::find_node_at_offset, algo::ancestors_at_offset,
ast::{self, ArgListOwner}, ast::{self, ArgListOwner},
AstNode, SyntaxNode, TextUnit, match_ast, AstNode, SyntaxNode, TextUnit,
}; };
use test_utils::tested_by; use test_utils::tested_by;
@ -20,24 +20,30 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
let name_ref = calling_node.name_ref()?; let name_ref = calling_node.name_ref()?;
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
let function = match &calling_node { let (mut call_info, has_self) = match &calling_node {
FnCallNode::CallExpr(expr) => { FnCallNode::CallExpr(expr) => {
//FIXME: apply subst //FIXME: apply subst
let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
match callable_def { match callable_def {
hir::CallableDef::Function(it) => it, hir::CallableDef::Function(it) => {
//FIXME: handle other callables (CallInfo::with_fn(db, it), it.data(db).has_self_param())
_ => return None, }
hir::CallableDef::Struct(it) => (CallInfo::with_struct(db, it)?, false),
hir::CallableDef::EnumVariant(it) => (CallInfo::with_enum_variant(db, it)?, false),
} }
} }
FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?, FnCallNode::MethodCallExpr(expr) => {
let function = analyzer.resolve_method_call(&expr)?;
(CallInfo::with_fn(db, function), function.data(db).has_self_param())
}
FnCallNode::MacroCallExpr(expr) => {
let macro_def = analyzer.resolve_macro_call(db, &expr)?;
(CallInfo::with_macro(db, macro_def)?, false)
}
}; };
let mut call_info = CallInfo::new(db, function);
// If we have a calling expression let's find which argument we are on // If we have a calling expression let's find which argument we are on
let num_params = call_info.parameters().len(); let num_params = call_info.parameters().len();
let has_self = function.data(db).has_self_param();
if num_params == 1 { if num_params == 1 {
if !has_self { if !has_self {
@ -75,20 +81,25 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
Some(call_info) Some(call_info)
} }
#[derive(Debug)]
enum FnCallNode { enum FnCallNode {
CallExpr(ast::CallExpr), CallExpr(ast::CallExpr),
MethodCallExpr(ast::MethodCallExpr), MethodCallExpr(ast::MethodCallExpr),
MacroCallExpr(ast::MacroCall),
} }
impl FnCallNode { impl FnCallNode {
fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> { fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> {
if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) { ancestors_at_offset(syntax, offset).find_map(|node| {
return Some(FnCallNode::CallExpr(expr)); match_ast! {
} match node {
if let Some(expr) = find_node_at_offset::<ast::MethodCallExpr>(syntax, offset) { ast::CallExpr(it) => { Some(FnCallNode::CallExpr(it)) },
return Some(FnCallNode::MethodCallExpr(expr)); ast::MethodCallExpr(it) => { Some(FnCallNode::MethodCallExpr(it)) },
} ast::MacroCall(it) => { Some(FnCallNode::MacroCallExpr(it)) },
None _ => { None },
}
}
})
} }
fn name_ref(&self) -> Option<ast::NameRef> { fn name_ref(&self) -> Option<ast::NameRef> {
@ -101,6 +112,8 @@ impl FnCallNode {
FnCallNode::MethodCallExpr(call_expr) => { FnCallNode::MethodCallExpr(call_expr) => {
call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0) call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0)
} }
FnCallNode::MacroCallExpr(call_expr) => call_expr.path()?.segment()?.name_ref(),
} }
} }
@ -108,17 +121,36 @@ impl FnCallNode {
match self { match self {
FnCallNode::CallExpr(expr) => expr.arg_list(), FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list(), FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
FnCallNode::MacroCallExpr(_) => None,
} }
} }
} }
impl CallInfo { impl CallInfo {
fn new(db: &RootDatabase, function: hir::Function) -> Self { fn with_fn(db: &RootDatabase, function: hir::Function) -> Self {
let signature = FunctionSignature::from_hir(db, function); let signature = FunctionSignature::from_hir(db, function);
CallInfo { signature, active_parameter: None } CallInfo { signature, active_parameter: None }
} }
fn with_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> {
let signature = FunctionSignature::from_struct(db, st)?;
Some(CallInfo { signature, active_parameter: None })
}
fn with_enum_variant(db: &RootDatabase, variant: hir::EnumVariant) -> Option<Self> {
let signature = FunctionSignature::from_enum_variant(db, variant)?;
Some(CallInfo { signature, active_parameter: None })
}
fn with_macro(db: &RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
let signature = FunctionSignature::from_macro(db, macro_def)?;
Some(CallInfo { signature, active_parameter: None })
}
fn parameters(&self) -> &[String] { fn parameters(&self) -> &[String] {
&self.signature.parameters &self.signature.parameters
} }
@ -415,6 +447,7 @@ pub fn foo(mut r: WriteHandler<()>) {
"#, "#,
); );
assert_eq!(info.label(), "fn finished(&mut self, ctx: &mut Self::Context)".to_string());
assert_eq!(info.parameters(), ["&mut self", "ctx: &mut Self::Context"]); assert_eq!(info.parameters(), ["&mut self", "ctx: &mut Self::Context"]);
assert_eq!(info.active_parameter, Some(1)); assert_eq!(info.active_parameter, Some(1));
assert_eq!( assert_eq!(
@ -438,4 +471,118 @@ By default this method stops actor's `Context`."#
let call_info = analysis.call_info(position).unwrap(); let call_info = analysis.call_info(position).unwrap();
assert!(call_info.is_none()); assert!(call_info.is_none());
} }
#[test]
fn test_nested_method_in_lamba() {
let info = call_info(
r#"struct Foo;
impl Foo {
fn bar(&self, _: u32) { }
}
fn bar(_: u32) { }
fn main() {
let foo = Foo;
std::thread::spawn(move || foo.bar(<|>));
}"#,
);
assert_eq!(info.parameters(), ["&self", "_: u32"]);
assert_eq!(info.active_parameter, Some(1));
assert_eq!(info.label(), "fn bar(&self, _: u32)");
}
#[test]
fn works_for_tuple_structs() {
let info = call_info(
r#"
/// A cool tuple struct
struct TS(u32, i32);
fn main() {
let s = TS(0, <|>);
}"#,
);
assert_eq!(info.label(), "struct TS(u32, i32) -> TS");
assert_eq!(info.doc().map(|it| it.into()), Some("A cool tuple struct".to_string()));
assert_eq!(info.active_parameter, Some(1));
}
#[test]
#[should_panic]
fn cant_call_named_structs() {
let _ = call_info(
r#"
struct TS { x: u32, y: i32 }
fn main() {
let s = TS(<|>);
}"#,
);
}
#[test]
fn works_for_enum_variants() {
let info = call_info(
r#"
enum E {
/// A Variant
A(i32),
/// Another
B,
/// And C
C { a: i32, b: i32 }
}
fn main() {
let a = E::A(<|>);
}
"#,
);
assert_eq!(info.label(), "E::A(0: i32)");
assert_eq!(info.doc().map(|it| it.into()), Some("A Variant".to_string()));
assert_eq!(info.active_parameter, Some(0));
}
#[test]
#[should_panic]
fn cant_call_enum_records() {
let _ = call_info(
r#"
enum E {
/// A Variant
A(i32),
/// Another
B,
/// And C
C { a: i32, b: i32 }
}
fn main() {
let a = E::C(<|>);
}
"#,
);
}
#[test]
fn fn_signature_for_macro() {
let info = call_info(
r#"
/// empty macro
macro_rules! foo {
() => {}
}
fn f() {
foo!(<|>);
}
"#,
);
assert_eq!(info.label(), "foo!()");
assert_eq!(info.doc().map(|it| it.into()), Some("empty macro".to_string()));
}
} }

View file

@ -43,7 +43,7 @@ impl fmt::Debug for AnalysisChange {
if !self.libraries_added.is_empty() { if !self.libraries_added.is_empty() {
d.field("libraries_added", &self.libraries_added.len()); d.field("libraries_added", &self.libraries_added.len());
} }
if !self.crate_graph.is_some() { if !self.crate_graph.is_none() {
d.field("crate_graph", &self.crate_graph); d.field("crate_graph", &self.crate_graph);
} }
d.finish() d.finish()

View file

@ -50,7 +50,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db),
_ => unreachable!(), _ => unreachable!(),
}; };
let krate = ctx.module.and_then(|m| m.krate(ctx.db)); let krate = ctx.module.map(|m| m.krate());
if let Some(krate) = krate { if let Some(krate) = krate {
ty.iterate_impl_items(ctx.db, krate, |item| { ty.iterate_impl_items(ctx.db, krate, |item| {
match item { match item {
@ -67,7 +67,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
}); });
} }
} }
_ => return, _ => {}
}; };
} }

View file

@ -9,16 +9,14 @@ use crate::{
}; };
use hir::{Ty, TypeCtor}; use hir::{Ty, TypeCtor};
use ra_syntax::{ast::AstNode, TextRange, TextUnit}; use ra_syntax::{ast::AstNode, TextRange, TextUnit};
use ra_text_edit::TextEditBuilder; use ra_text_edit::TextEdit;
fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
let edit = { let edit = {
let receiver_range = let receiver_range =
ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range(); ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range();
let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
let mut builder = TextEditBuilder::default(); TextEdit::replace(delete_range, snippet.to_string())
builder.replace(delete_range, snippet.to_string());
builder.finish()
}; };
CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label) CompletionItem::new(CompletionKind::Postfix, ctx.source_range(), label)
.detail(detail) .detail(detail)

View file

@ -4,7 +4,7 @@ use std::fmt;
use hir::Documentation; use hir::Documentation;
use ra_syntax::TextRange; use ra_syntax::TextRange;
use ra_text_edit::{TextEdit, TextEditBuilder}; use ra_text_edit::TextEdit;
/// `CompletionItem` describes a single completion variant in the editor pop-up. /// `CompletionItem` describes a single completion variant in the editor pop-up.
/// It is basically a POD with various properties. To construct a /// It is basically a POD with various properties. To construct a
@ -192,12 +192,10 @@ impl Builder {
let label = self.label; let label = self.label;
let text_edit = match self.text_edit { let text_edit = match self.text_edit {
Some(it) => it, Some(it) => it,
None => { None => TextEdit::replace(
let mut builder = TextEditBuilder::default(); self.source_range,
builder self.insert_text.unwrap_or_else(|| label.clone()),
.replace(self.source_range, self.insert_text.unwrap_or_else(|| label.clone())); ),
builder.finish()
}
}; };
CompletionItem { CompletionItem {

View file

@ -136,7 +136,7 @@ impl Completions {
for (idx, s) in docs.match_indices(&macro_name) { for (idx, s) in docs.match_indices(&macro_name) {
let (before, after) = (&docs[..idx], &docs[idx + s.len()..]); let (before, after) = (&docs[..idx], &docs[idx + s.len()..]);
// Ensure to match the full word // Ensure to match the full word
if after.starts_with("!") if after.starts_with('!')
&& before && before
.chars() .chars()
.rev() .rev()
@ -164,27 +164,32 @@ impl Completions {
name: Option<String>, name: Option<String>,
macro_: hir::MacroDef, macro_: hir::MacroDef,
) { ) {
let ast_node = macro_.source(ctx.db).ast; let name = match name {
if let Some(name) = name { Some(it) => it,
let detail = macro_label(&ast_node); None => return,
};
let docs = macro_.docs(ctx.db); let ast_node = macro_.source(ctx.db).ast;
let detail = macro_label(&ast_node);
let docs = macro_.docs(ctx.db);
let macro_declaration = format!("{}!", name);
let mut builder =
CompletionItem::new(CompletionKind::Reference, ctx.source_range(), &macro_declaration)
.kind(CompletionItemKind::Macro)
.set_documentation(docs.clone())
.detail(detail);
builder = if ctx.use_item_syntax.is_some() {
builder.insert_text(name)
} else {
let macro_braces_to_insert = let macro_braces_to_insert =
self.guess_macro_braces(&name, docs.as_ref().map_or("", |s| s.as_str())); self.guess_macro_braces(&name, docs.as_ref().map_or("", |s| s.as_str()));
let macro_declaration = name + "!"; builder.insert_snippet(macro_declaration + macro_braces_to_insert)
};
let builder = CompletionItem::new( self.add(builder);
CompletionKind::Reference,
ctx.source_range(),
&macro_declaration,
)
.kind(CompletionItemKind::Macro)
.set_documentation(docs)
.detail(detail)
.insert_snippet(macro_declaration + macro_braces_to_insert);
self.add(builder);
}
} }
fn add_function_with_name( fn add_function_with_name(
@ -220,7 +225,7 @@ impl Completions {
} else { } else {
(format!("{}($0)", data.name()), format!("{}(…)", name)) (format!("{}($0)", data.name()), format!("{}(…)", name))
}; };
builder = builder.lookup_by(name.clone()).label(label).insert_snippet(snippet); builder = builder.lookup_by(name).label(label).insert_snippet(snippet);
} }
self.add(builder) self.add(builder)
@ -281,10 +286,11 @@ fn has_non_default_type_params(def: hir::GenericDef, db: &db::RootDatabase) -> b
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::completion::{do_completion, CompletionItem, CompletionKind};
use insta::assert_debug_snapshot; use insta::assert_debug_snapshot;
use test_utils::covers; use test_utils::covers;
use crate::completion::{do_completion, CompletionItem, CompletionKind};
fn do_reference_completion(code: &str) -> Vec<CompletionItem> { fn do_reference_completion(code: &str) -> Vec<CompletionItem> {
do_completion(code, CompletionKind::Reference) do_completion(code, CompletionKind::Reference)
} }
@ -576,4 +582,34 @@ mod tests {
"### "###
); );
} }
#[test]
fn dont_insert_macro_call_braces_in_use() {
assert_debug_snapshot!(
do_reference_completion(
r"
//- /main.rs
use foo::<|>;
//- /foo/lib.rs
#[macro_export]
macro_rules frobnicate {
() => ()
}
"
),
@r###"
[
CompletionItem {
label: "frobnicate!",
source_range: [9; 9),
delete: [9; 9),
insert: "frobnicate",
kind: Macro,
detail: "#[macro_export]\nmacro_rules! frobnicate",
},
]
"###
)
}
} }

View file

@ -23,6 +23,7 @@ use crate::{
hir::db::InternDatabaseStorage, hir::db::InternDatabaseStorage,
hir::db::AstDatabaseStorage, hir::db::AstDatabaseStorage,
hir::db::DefDatabaseStorage, hir::db::DefDatabaseStorage,
hir::db::DefDatabase2Storage,
hir::db::HirDatabaseStorage hir::db::HirDatabaseStorage
)] )]
#[derive(Debug)] #[derive(Debug)]

View file

@ -85,10 +85,9 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
}) })
.on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| { .on::<hir::diagnostics::MissingOkInTailExpr, _>(|d| {
let node = d.ast(db); let node = d.ast(db);
let mut builder = TextEditBuilder::default();
let replacement = format!("Ok({})", node.syntax()); let replacement = format!("Ok({})", node.syntax());
builder.replace(node.syntax().text_range(), replacement); let edit = TextEdit::replace(node.syntax().text_range(), replacement);
let fix = SourceChange::source_file_edit_from("wrap with ok", file_id, builder.finish()); let fix = SourceChange::source_file_edit_from("wrap with ok", file_id, edit);
res.borrow_mut().push(Diagnostic { res.borrow_mut().push(Diagnostic {
range: d.highlight_range(), range: d.highlight_range(),
message: d.message(), message: d.message(),
@ -152,9 +151,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
let end = use_tree_list_node.text_range().end(); let end = use_tree_list_node.text_range().end();
let range = TextRange::from_to(start, end); let range = TextRange::from_to(start, end);
let mut edit_builder = TextEditBuilder::default(); return Some(TextEdit::delete(range));
edit_builder.delete(range);
return Some(edit_builder.finish());
} }
None None
} }

View file

@ -2,7 +2,7 @@
use std::fmt::{self, Display}; use std::fmt::{self, Display};
use hir::{Docs, Documentation, HasSource}; use hir::{Docs, Documentation, HasSource, HirDisplay};
use join_to_string::join; use join_to_string::join;
use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner};
use std::convert::From; use std::convert::From;
@ -12,9 +12,18 @@ use crate::{
display::{generic_parameters, where_predicates}, display::{generic_parameters, where_predicates},
}; };
#[derive(Debug)]
pub enum CallableKind {
Function,
StructConstructor,
VariantConstructor,
Macro,
}
/// Contains information about a function signature /// Contains information about a function signature
#[derive(Debug)] #[derive(Debug)]
pub struct FunctionSignature { pub struct FunctionSignature {
pub kind: CallableKind,
/// Optional visibility /// Optional visibility
pub visibility: Option<String>, pub visibility: Option<String>,
/// Name of the function /// Name of the function
@ -42,6 +51,99 @@ impl FunctionSignature {
let ast_node = function.source(db).ast; let ast_node = function.source(db).ast;
FunctionSignature::from(&ast_node).with_doc_opt(doc) FunctionSignature::from(&ast_node).with_doc_opt(doc)
} }
pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> {
let node: ast::StructDef = st.source(db).ast;
match node.kind() {
ast::StructKind::Named(_) => return None,
_ => (),
};
let params = st
.fields(db)
.into_iter()
.map(|field: hir::StructField| {
let ty = field.ty(db);
format!("{}", ty.display(db))
})
.collect();
Some(
FunctionSignature {
kind: CallableKind::StructConstructor,
visibility: node.visibility().map(|n| n.syntax().text().to_string()),
name: node.name().map(|n| n.text().to_string()),
ret_type: node.name().map(|n| n.text().to_string()),
parameters: params,
generic_parameters: generic_parameters(&node),
where_predicates: where_predicates(&node),
doc: None,
}
.with_doc_opt(st.docs(db)),
)
}
pub(crate) fn from_enum_variant(
db: &db::RootDatabase,
variant: hir::EnumVariant,
) -> Option<Self> {
let node: ast::EnumVariant = variant.source(db).ast;
match node.kind() {
ast::StructKind::Named(_) | ast::StructKind::Unit => return None,
_ => (),
};
let parent_name = match variant.parent_enum(db).name(db) {
Some(name) => name.to_string(),
None => "missing".into(),
};
let name = format!("{}::{}", parent_name, variant.name(db).unwrap());
let params = variant
.fields(db)
.into_iter()
.map(|field: hir::StructField| {
let name = field.name(db);
let ty = field.ty(db);
format!("{}: {}", name, ty.display(db))
})
.collect();
Some(
FunctionSignature {
kind: CallableKind::VariantConstructor,
visibility: None,
name: Some(name),
ret_type: None,
parameters: params,
generic_parameters: vec![],
where_predicates: vec![],
doc: None,
}
.with_doc_opt(variant.docs(db)),
)
}
pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
let node: ast::MacroCall = macro_def.source(db).ast;
let params = vec![];
Some(
FunctionSignature {
kind: CallableKind::Macro,
visibility: None,
name: node.name().map(|n| n.text().to_string()),
ret_type: None,
parameters: params,
generic_parameters: vec![],
where_predicates: vec![],
doc: None,
}
.with_doc_opt(macro_def.docs(db)),
)
}
} }
impl From<&'_ ast::FnDef> for FunctionSignature { impl From<&'_ ast::FnDef> for FunctionSignature {
@ -59,6 +161,7 @@ impl From<&'_ ast::FnDef> for FunctionSignature {
} }
FunctionSignature { FunctionSignature {
kind: CallableKind::Function,
visibility: node.visibility().map(|n| n.syntax().text().to_string()), visibility: node.visibility().map(|n| n.syntax().text().to_string()),
name: node.name().map(|n| n.text().to_string()), name: node.name().map(|n| n.text().to_string()),
ret_type: node ret_type: node
@ -81,7 +184,12 @@ impl Display for FunctionSignature {
} }
if let Some(name) = &self.name { if let Some(name) = &self.name {
write!(f, "fn {}", name)?; match self.kind {
CallableKind::Function => write!(f, "fn {}", name)?,
CallableKind::StructConstructor => write!(f, "struct {}", name)?,
CallableKind::VariantConstructor => write!(f, "{}", name)?,
CallableKind::Macro => write!(f, "{}!", name)?,
}
} }
if !self.generic_parameters.is_empty() { if !self.generic_parameters.is_empty() {

View file

@ -5,7 +5,7 @@ use ra_syntax::{
algo::find_covering_element, algo::find_covering_element,
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken},
Direction, NodeOrToken, Direction, NodeOrToken,
SyntaxKind::*, SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
}; };
@ -29,10 +29,12 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
USE_TREE_LIST, USE_TREE_LIST,
TYPE_PARAM_LIST, TYPE_PARAM_LIST,
TYPE_ARG_LIST, TYPE_ARG_LIST,
TYPE_BOUND_LIST,
PARAM_LIST, PARAM_LIST,
ARG_LIST, ARG_LIST,
ARRAY_EXPR, ARRAY_EXPR,
TUPLE_EXPR, TUPLE_EXPR,
WHERE_CLAUSE,
]; ];
if range.is_empty() { if range.is_empty() {
@ -146,13 +148,17 @@ fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
} }
} }
/// Extend list item selection to include nearby comma and whitespace. /// Extend list item selection to include nearby delimiter and whitespace.
fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
fn is_single_line_ws(node: &SyntaxToken) -> bool { fn is_single_line_ws(node: &SyntaxToken) -> bool {
node.kind() == WHITESPACE && !node.text().contains('\n') node.kind() == WHITESPACE && !node.text().contains('\n')
} }
fn nearby_comma(node: &SyntaxNode, dir: Direction) -> Option<SyntaxToken> { fn nearby_delimiter(
delimiter_kind: SyntaxKind,
node: &SyntaxNode,
dir: Direction,
) -> Option<SyntaxToken> {
node.siblings_with_tokens(dir) node.siblings_with_tokens(dir)
.skip(1) .skip(1)
.skip_while(|node| match node { .skip_while(|node| match node {
@ -161,19 +167,26 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
}) })
.next() .next()
.and_then(|it| it.into_token()) .and_then(|it| it.into_token())
.filter(|node| node.kind() == T![,]) .filter(|node| node.kind() == delimiter_kind)
} }
if let Some(comma_node) = nearby_comma(node, Direction::Prev) { let delimiter = match node.kind() {
return Some(TextRange::from_to(comma_node.text_range().start(), node.text_range().end())); TYPE_BOUND => T![+],
_ => T![,],
};
if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Prev) {
return Some(TextRange::from_to(
delimiter_node.text_range().start(),
node.text_range().end(),
));
} }
if let Some(comma_node) = nearby_comma(node, Direction::Next) { if let Some(delimiter_node) = nearby_delimiter(delimiter, node, Direction::Next) {
// Include any following whitespace when comma if after list item. // Include any following whitespace when delimiter is after list item.
let final_node = comma_node let final_node = delimiter_node
.next_sibling_or_token() .next_sibling_or_token()
.and_then(|it| it.into_token()) .and_then(|it| it.into_token())
.filter(|node| is_single_line_ws(node)) .filter(|node| is_single_line_ws(node))
.unwrap_or(comma_node); .unwrap_or(delimiter_node);
return Some(TextRange::from_to(node.text_range().start(), final_node.text_range().end())); return Some(TextRange::from_to(node.text_range().start(), final_node.text_range().end()));
} }
@ -387,4 +400,53 @@ fn bar(){}
&["foo", "\" fn foo() {\""], &["foo", "\" fn foo() {\""],
); );
} }
#[test]
fn test_extend_trait_bounds_list_in_where_clause() {
do_check(
r#"
fn foo<R>()
where
R: req::Request + 'static,
R::Params: DeserializeOwned<|> + panic::UnwindSafe + 'static,
R::Result: Serialize + 'static,
"#,
&[
"DeserializeOwned",
"DeserializeOwned + ",
"DeserializeOwned + panic::UnwindSafe + 'static",
"R::Params: DeserializeOwned + panic::UnwindSafe + 'static",
"R::Params: DeserializeOwned + panic::UnwindSafe + 'static,",
],
);
do_check(r#"fn foo<T>() where T: <|>Copy"#, &["Copy"]);
do_check(r#"fn foo<T>() where T: <|>Copy + Display"#, &["Copy", "Copy + "]);
do_check(r#"fn foo<T>() where T: <|>Copy +Display"#, &["Copy", "Copy +"]);
do_check(r#"fn foo<T>() where T: <|>Copy+Display"#, &["Copy", "Copy+"]);
do_check(r#"fn foo<T>() where T: Copy + <|>Display"#, &["Display", "+ Display"]);
do_check(r#"fn foo<T>() where T: Copy + <|>Display + Sync"#, &["Display", "+ Display"]);
do_check(r#"fn foo<T>() where T: Copy +<|>Display"#, &["Display", "+Display"]);
}
#[test]
fn test_extend_trait_bounds_list_inline() {
do_check(r#"fn foo<T: <|>Copy>() {}"#, &["Copy"]);
do_check(r#"fn foo<T: <|>Copy + Display>() {}"#, &["Copy", "Copy + "]);
do_check(r#"fn foo<T: <|>Copy +Display>() {}"#, &["Copy", "Copy +"]);
do_check(r#"fn foo<T: <|>Copy+Display>() {}"#, &["Copy", "Copy+"]);
do_check(r#"fn foo<T: Copy + <|>Display>() {}"#, &["Display", "+ Display"]);
do_check(r#"fn foo<T: Copy + <|>Display + Sync>() {}"#, &["Display", "+ Display"]);
do_check(r#"fn foo<T: Copy +<|>Display>() {}"#, &["Display", "+Display"]);
do_check(
r#"fn foo<T: Copy<|> + Display, U: Copy>() {}"#,
&[
"Copy",
"Copy + ",
"Copy + Display",
"T: Copy + Display",
"T: Copy + Display, ",
"<T: Copy + Display, U: Copy>",
],
);
}
} }

View file

@ -51,7 +51,7 @@ fn impls_for_def(
} }
}; };
let krate = module.krate(db)?; let krate = module.krate();
let impls = db.impls_in_crate(krate); let impls = db.impls_in_crate(krate);
Some( Some(
@ -72,7 +72,7 @@ fn impls_for_trait(
let src = hir::Source { file_id: position.file_id.into(), ast: node.clone() }; let src = hir::Source { file_id: position.file_id.into(), ast: node.clone() };
let tr = hir::Trait::from_source(db, src)?; let tr = hir::Trait::from_source(db, src)?;
let krate = module.krate(db)?; let krate = module.krate();
let impls = db.impls_in_crate(krate); let impls = db.impls_in_crate(krate);
Some( Some(

View file

@ -14,6 +14,7 @@ mod db;
pub mod mock_analysis; pub mod mock_analysis;
mod symbol_index; mod symbol_index;
mod change; mod change;
mod source_change;
mod feature_flags; mod feature_flags;
mod status; mod status;
@ -54,8 +55,6 @@ use ra_db::{
CheckCanceled, FileLoader, SourceDatabase, CheckCanceled, FileLoader, SourceDatabase,
}; };
use ra_syntax::{SourceFile, TextRange, TextUnit}; use ra_syntax::{SourceFile, TextRange, TextUnit};
use ra_text_edit::TextEdit;
use relative_path::RelativePathBuf;
use crate::{db::LineIndexDatabase, symbol_index::FileSymbol}; use crate::{db::LineIndexDatabase, symbol_index::FileSymbol};
@ -73,6 +72,7 @@ pub use crate::{
line_index_utils::translate_offset_with_edit, line_index_utils::translate_offset_with_edit,
references::{ReferenceSearchResult, SearchScope}, references::{ReferenceSearchResult, SearchScope},
runnables::{Runnable, RunnableKind}, runnables::{Runnable, RunnableKind},
source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
syntax_highlighting::HighlightedRange, syntax_highlighting::HighlightedRange,
}; };
@ -83,99 +83,6 @@ pub use ra_db::{
pub type Cancelable<T> = Result<T, Canceled>; pub type Cancelable<T> = Result<T, Canceled>;
#[derive(Debug)]
pub struct SourceChange {
pub label: String,
pub source_file_edits: Vec<SourceFileEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<FilePosition>,
}
impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub(crate) fn from_edits<L: Into<String>>(
label: L,
source_file_edits: Vec<SourceFileEdit>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange {
label: label.into(),
source_file_edits,
file_system_edits,
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only the given `SourceFileEdits`.
pub(crate) fn source_file_edits<L: Into<String>>(label: L, edits: Vec<SourceFileEdit>) -> Self {
SourceChange {
label: label.into(),
source_file_edits: edits,
file_system_edits: vec![],
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only the given `FileSystemEdits`.
pub(crate) fn file_system_edits<L: Into<String>>(label: L, edits: Vec<FileSystemEdit>) -> Self {
SourceChange {
label: label.into(),
source_file_edits: vec![],
file_system_edits: edits,
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only a single `SourceFileEdit`.
pub(crate) fn source_file_edit<L: Into<String>>(label: L, edit: SourceFileEdit) -> Self {
SourceChange::source_file_edits(label, vec![edit])
}
/// Creates a new SourceChange with the given label
/// from the given `FileId` and `TextEdit`
pub(crate) fn source_file_edit_from<L: Into<String>>(
label: L,
file_id: FileId,
edit: TextEdit,
) -> Self {
SourceChange::source_file_edit(label, SourceFileEdit { file_id, edit })
}
/// Creates a new SourceChange with the given label
/// from the given `FileId` and `TextEdit`
pub(crate) fn file_system_edit<L: Into<String>>(label: L, edit: FileSystemEdit) -> Self {
SourceChange::file_system_edits(label, vec![edit])
}
/// Sets the cursor position to the given `FilePosition`
pub(crate) fn with_cursor(mut self, cursor_position: FilePosition) -> Self {
self.cursor_position = Some(cursor_position);
self
}
/// Sets the cursor position to the given `FilePosition`
pub(crate) fn with_cursor_opt(mut self, cursor_position: Option<FilePosition>) -> Self {
self.cursor_position = cursor_position;
self
}
}
#[derive(Debug)]
pub struct SourceFileEdit {
pub file_id: FileId,
pub edit: TextEdit,
}
#[derive(Debug)]
pub enum FileSystemEdit {
CreateFile { source_root: SourceRootId, path: RelativePathBuf },
MoveFile { src: FileId, dst_source_root: SourceRootId, dst_path: RelativePathBuf },
}
#[derive(Debug)] #[derive(Debug)]
pub struct Diagnostic { pub struct Diagnostic {
pub message: String, pub message: String,
@ -407,24 +314,20 @@ impl Analysis {
self.with_db(|db| typing::on_enter(&db, position)) self.with_db(|db| typing::on_enter(&db, position))
} }
/// Returns an edit which should be applied after `=` was typed. Primarily, /// Returns an edit which should be applied after a character was typed.
/// this works when adding `let =`. ///
// FIXME: use a snippet completion instead of this hack here. /// This is useful for some on-the-fly fixups, like adding `;` to `let =`
pub fn on_eq_typed(&self, position: FilePosition) -> Cancelable<Option<SourceChange>> { /// automatically.
self.with_db(|db| { pub fn on_char_typed(
let parse = db.parse(position.file_id); &self,
let file = parse.tree(); position: FilePosition,
let edit = typing::on_eq_typed(&file, position.offset)?; char_typed: char,
Some(SourceChange::source_file_edit( ) -> Cancelable<Option<SourceChange>> {
"add semicolon", // Fast path to not even parse the file.
SourceFileEdit { edit, file_id: position.file_id }, if !typing::TRIGGER_CHARS.contains(char_typed) {
)) return Ok(None);
}) }
} self.with_db(|db| typing::on_char_typed(&db, position, char_typed))
/// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
pub fn on_dot_typed(&self, position: FilePosition) -> Cancelable<Option<SourceChange>> {
self.with_db(|db| typing::on_dot_typed(&db, position))
} }
/// Returns a tree representation of symbols in the file. Useful to draw a /// Returns a tree representation of symbols in the file. Useful to draw a

View file

@ -27,10 +27,7 @@ pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
Some(it) => it, Some(it) => it,
None => return Vec::new(), None => return Vec::new(),
}; };
let krate = match module.krate(db) { let krate = module.krate();
Some(it) => it,
None => return Vec::new(),
};
vec![krate.crate_id()] vec![krate.crate_id()]
} }

View file

@ -3,6 +3,7 @@
use hir::ModuleSource; use hir::ModuleSource;
use ra_db::{SourceDatabase, SourceDatabaseExt}; use ra_db::{SourceDatabase, SourceDatabaseExt};
use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode}; use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SyntaxNode};
use ra_text_edit::TextEdit;
use relative_path::{RelativePath, RelativePathBuf}; use relative_path::{RelativePath, RelativePathBuf};
use crate::{ use crate::{
@ -43,14 +44,7 @@ fn source_edit_from_file_id_range(
range: TextRange, range: TextRange,
new_name: &str, new_name: &str,
) -> SourceFileEdit { ) -> SourceFileEdit {
SourceFileEdit { SourceFileEdit { file_id, edit: TextEdit::replace(range, new_name.into()) }
file_id,
edit: {
let mut builder = ra_text_edit::TextEditBuilder::default();
builder.replace(range, new_name.into());
builder.finish()
},
}
} }
fn rename_mod( fn rename_mod(
@ -94,11 +88,7 @@ fn rename_mod(
let edit = SourceFileEdit { let edit = SourceFileEdit {
file_id: position.file_id, file_id: position.file_id,
edit: { edit: TextEdit::replace(ast_name.syntax().text_range(), new_name.into()),
let mut builder = ra_text_edit::TextEditBuilder::default();
builder.replace(ast_name.syntax().text_range(), new_name.into());
builder.finish()
},
}; };
source_file_edits.push(edit); source_file_edits.push(edit);
@ -126,12 +116,14 @@ fn rename_reference(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use insta::assert_debug_snapshot;
use ra_text_edit::TextEditBuilder;
use test_utils::assert_eq_text;
use crate::{ use crate::{
mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId, mock_analysis::analysis_and_position, mock_analysis::single_file_with_position, FileId,
ReferenceSearchResult, ReferenceSearchResult,
}; };
use insta::assert_debug_snapshot;
use test_utils::assert_eq_text;
#[test] #[test]
fn test_find_all_refs_for_local() { fn test_find_all_refs_for_local() {
@ -452,7 +444,7 @@ mod tests {
fn test_rename(text: &str, new_name: &str, expected: &str) { fn test_rename(text: &str, new_name: &str, expected: &str) {
let (analysis, position) = single_file_with_position(text); let (analysis, position) = single_file_with_position(text);
let source_change = analysis.rename(position, new_name).unwrap(); let source_change = analysis.rename(position, new_name).unwrap();
let mut text_edit_builder = ra_text_edit::TextEditBuilder::default(); let mut text_edit_builder = TextEditBuilder::default();
let mut file_id: Option<FileId> = None; let mut file_id: Option<FileId> = None;
if let Some(change) = source_change { if let Some(change) = source_change {
for edit in change.info.source_file_edits { for edit in change.info.source_file_edits {

View file

@ -111,8 +111,7 @@ impl NameDefinition {
if vis.as_str() != "" { if vis.as_str() != "" {
let source_root_id = db.file_source_root(file_id); let source_root_id = db.file_source_root(file_id);
let source_root = db.source_root(source_root_id); let source_root = db.source_root(source_root_id);
let mut res = let mut res = source_root.walk().map(|id| (id, None)).collect::<FxHashMap<_, _>>();
source_root.walk().map(|id| (id.into(), None)).collect::<FxHashMap<_, _>>();
// FIXME: add "pub(in path)" // FIXME: add "pub(in path)"
@ -120,7 +119,7 @@ impl NameDefinition {
return SearchScope::new(res); return SearchScope::new(res);
} }
if vis.as_str() == "pub" { if vis.as_str() == "pub" {
let krate = self.container.krate(db).unwrap(); let krate = self.container.krate();
let crate_graph = db.crate_graph(); let crate_graph = db.crate_graph();
for crate_id in crate_graph.iter() { for crate_id in crate_graph.iter() {
let mut crate_deps = crate_graph.dependencies(crate_id); let mut crate_deps = crate_graph.dependencies(crate_id);
@ -128,7 +127,7 @@ impl NameDefinition {
let root_file = crate_graph.crate_root(crate_id); let root_file = crate_graph.crate_root(crate_id);
let source_root_id = db.file_source_root(root_file); let source_root_id = db.file_source_root(root_file);
let source_root = db.source_root(source_root_id); let source_root = db.source_root(source_root_id);
res.extend(source_root.walk().map(|id| (id.into(), None))); res.extend(source_root.walk().map(|id| (id, None)));
} }
} }
return SearchScope::new(res); return SearchScope::new(res);

View file

@ -4,7 +4,7 @@ use itertools::Itertools;
use ra_db::SourceDatabase; use ra_db::SourceDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
SyntaxNode, TextRange, match_ast, SyntaxNode, TextRange,
}; };
use crate::{db::RootDatabase, FileId}; use crate::{db::RootDatabase, FileId};
@ -29,12 +29,12 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
} }
fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> { fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> {
if let Some(fn_def) = ast::FnDef::cast(item.clone()) { match_ast! {
runnable_fn(fn_def) match item {
} else if let Some(m) = ast::Module::cast(item) { ast::FnDef(it) => { runnable_fn(it) },
runnable_mod(db, file_id, m) ast::Module(it) => { runnable_mod(db, file_id, it) },
} else { _ => { None },
None }
} }
} }

View file

@ -0,0 +1,119 @@
//! This modules defines type to represent changes to the source code, that flow
//! from the server to the client.
//!
//! It can be viewed as a dual for `AnalysisChange`.
use ra_text_edit::TextEdit;
use relative_path::RelativePathBuf;
use crate::{FileId, FilePosition, SourceRootId, TextUnit};
#[derive(Debug)]
pub struct SourceChange {
pub label: String,
pub source_file_edits: Vec<SourceFileEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<FilePosition>,
}
impl SourceChange {
/// Creates a new SourceChange with the given label
/// from the edits.
pub(crate) fn from_edits<L: Into<String>>(
label: L,
source_file_edits: Vec<SourceFileEdit>,
file_system_edits: Vec<FileSystemEdit>,
) -> Self {
SourceChange {
label: label.into(),
source_file_edits,
file_system_edits,
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only the given `SourceFileEdits`.
pub(crate) fn source_file_edits<L: Into<String>>(label: L, edits: Vec<SourceFileEdit>) -> Self {
SourceChange {
label: label.into(),
source_file_edits: edits,
file_system_edits: vec![],
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only the given `FileSystemEdits`.
pub(crate) fn file_system_edits<L: Into<String>>(label: L, edits: Vec<FileSystemEdit>) -> Self {
SourceChange {
label: label.into(),
source_file_edits: vec![],
file_system_edits: edits,
cursor_position: None,
}
}
/// Creates a new SourceChange with the given label,
/// containing only a single `SourceFileEdit`.
pub(crate) fn source_file_edit<L: Into<String>>(label: L, edit: SourceFileEdit) -> Self {
SourceChange::source_file_edits(label, vec![edit])
}
/// Creates a new SourceChange with the given label
/// from the given `FileId` and `TextEdit`
pub(crate) fn source_file_edit_from<L: Into<String>>(
label: L,
file_id: FileId,
edit: TextEdit,
) -> Self {
SourceChange::source_file_edit(label, SourceFileEdit { file_id, edit })
}
/// Creates a new SourceChange with the given label
/// from the given `FileId` and `TextEdit`
pub(crate) fn file_system_edit<L: Into<String>>(label: L, edit: FileSystemEdit) -> Self {
SourceChange::file_system_edits(label, vec![edit])
}
/// Sets the cursor position to the given `FilePosition`
pub(crate) fn with_cursor(mut self, cursor_position: FilePosition) -> Self {
self.cursor_position = Some(cursor_position);
self
}
/// Sets the cursor position to the given `FilePosition`
pub(crate) fn with_cursor_opt(mut self, cursor_position: Option<FilePosition>) -> Self {
self.cursor_position = cursor_position;
self
}
}
#[derive(Debug)]
pub struct SourceFileEdit {
pub file_id: FileId,
pub edit: TextEdit,
}
#[derive(Debug)]
pub enum FileSystemEdit {
CreateFile { source_root: SourceRootId, path: RelativePathBuf },
MoveFile { src: FileId, dst_source_root: SourceRootId, dst_path: RelativePathBuf },
}
pub(crate) struct SingleFileChange {
pub label: String,
pub edit: TextEdit,
pub cursor_position: Option<TextUnit>,
}
impl SingleFileChange {
pub(crate) fn into_source_change(self, file_id: FileId) -> SourceChange {
SourceChange {
label: self.label,
source_file_edits: vec![SourceFileEdit { file_id, edit: self.edit }],
file_system_edits: Vec::new(),
cursor_position: self.cursor_position.map(|offset| FilePosition { file_id, offset }),
}
}
}

View file

@ -1,4 +1,17 @@
//! FIXME: write short doc here //! This module handles auto-magic editing actions applied together with users
//! edits. For example, if the user typed
//!
//! ```text
//! foo
//! .bar()
//! .baz()
//! | // <- cursor is here
//! ```
//!
//! and types `.` next, we want to indent the dot.
//!
//! Language server executes such typing assists synchronously. That is, they
//! block user's typing and should be pretty fast for this reason!
use ra_db::{FilePosition, SourceDatabase}; use ra_db::{FilePosition, SourceDatabase};
use ra_fmt::leading_indent; use ra_fmt::leading_indent;
@ -9,9 +22,9 @@ use ra_syntax::{
SyntaxKind::*, SyntaxKind::*,
SyntaxToken, TextRange, TextUnit, TokenAtOffset, SyntaxToken, TextRange, TextUnit, TokenAtOffset,
}; };
use ra_text_edit::{TextEdit, TextEditBuilder}; use ra_text_edit::TextEdit;
use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; use crate::{db::RootDatabase, source_change::SingleFileChange, SourceChange, SourceFileEdit};
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
let parse = db.parse(position.file_id); let parse = db.parse(position.file_id);
@ -36,13 +49,12 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
let indent = node_indent(&file, comment.syntax())?; let indent = node_indent(&file, comment.syntax())?;
let inserted = format!("\n{}{} ", indent, prefix); let inserted = format!("\n{}{} ", indent, prefix);
let cursor_position = position.offset + TextUnit::of_str(&inserted); let cursor_position = position.offset + TextUnit::of_str(&inserted);
let mut edit = TextEditBuilder::default(); let edit = TextEdit::insert(position.offset, inserted);
edit.insert(position.offset, inserted);
Some( Some(
SourceChange::source_file_edit( SourceChange::source_file_edit(
"on enter", "on enter",
SourceFileEdit { edit: edit.finish(), file_id: position.file_id }, SourceFileEdit { edit, file_id: position.file_id },
) )
.with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }), .with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }),
) )
@ -68,39 +80,67 @@ fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
Some(text[pos..].into()) Some(text[pos..].into())
} }
pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> { pub(crate) const TRIGGER_CHARS: &str = ".=>";
assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?; pub(crate) fn on_char_typed(
db: &RootDatabase,
position: FilePosition,
char_typed: char,
) -> Option<SourceChange> {
assert!(TRIGGER_CHARS.contains(char_typed));
let file = &db.parse(position.file_id).tree();
assert_eq!(file.syntax().text().char_at(position.offset), Some(char_typed));
let single_file_change = on_char_typed_inner(file, position.offset, char_typed)?;
Some(single_file_change.into_source_change(position.file_id))
}
fn on_char_typed_inner(
file: &SourceFile,
offset: TextUnit,
char_typed: char,
) -> Option<SingleFileChange> {
assert!(TRIGGER_CHARS.contains(char_typed));
match char_typed {
'.' => on_dot_typed(file, offset),
'=' => on_eq_typed(file, offset),
'>' => on_arrow_typed(file, offset),
_ => unreachable!(),
}
}
/// Returns an edit which should be applied after `=` was typed. Primarily,
/// this works when adding `let =`.
// FIXME: use a snippet completion instead of this hack here.
fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> {
assert_eq!(file.syntax().text().char_at(offset), Some('='));
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
if let_stmt.has_semi() { if let_stmt.has_semi() {
return None; return None;
} }
if let Some(expr) = let_stmt.initializer() { if let Some(expr) = let_stmt.initializer() {
let expr_range = expr.syntax().text_range(); let expr_range = expr.syntax().text_range();
if expr_range.contains(eq_offset) && eq_offset != expr_range.start() { if expr_range.contains(offset) && offset != expr_range.start() {
return None; return None;
} }
if file.syntax().text().slice(eq_offset..expr_range.start()).contains_char('\n') { if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
return None; return None;
} }
} else { } else {
return None; return None;
} }
let offset = let_stmt.syntax().text_range().end(); let offset = let_stmt.syntax().text_range().end();
let mut edit = TextEditBuilder::default(); Some(SingleFileChange {
edit.insert(offset, ";".to_string()); label: "add semicolon".to_string(),
Some(edit.finish()) edit: TextEdit::insert(offset, ";".to_string()),
cursor_position: None,
})
} }
pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { /// Returns an edit which should be applied when a dot ('.') is typed on a blank line, indenting the line appropriately.
let parse = db.parse(position.file_id); fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> {
assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.')); assert_eq!(file.syntax().text().char_at(offset), Some('.'));
let whitespace =
let whitespace = parse file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?;
.tree()
.syntax()
.token_at_offset(position.offset)
.left_biased()
.and_then(ast::Whitespace::cast)?;
let current_indent = { let current_indent = {
let text = whitespace.text(); let text = whitespace.text();
@ -117,20 +157,36 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
if current_indent_len == target_indent_len { if current_indent_len == target_indent_len {
return None; return None;
} }
let mut edit = TextEditBuilder::default();
edit.replace(
TextRange::from_to(position.offset - current_indent_len, position.offset),
target_indent,
);
let res = SourceChange::source_file_edit_from("reindent dot", position.file_id, edit.finish()) Some(SingleFileChange {
.with_cursor(FilePosition { label: "reindent dot".to_string(),
offset: position.offset + target_indent_len - current_indent_len edit: TextEdit::replace(
+ TextUnit::of_char('.'), TextRange::from_to(offset - current_indent_len, offset),
file_id: position.file_id, target_indent,
}); ),
cursor_position: Some(
offset + target_indent_len - current_indent_len + TextUnit::of_char('.'),
),
})
}
Some(res) /// Adds a space after an arrow when `fn foo() { ... }` is turned into `fn foo() -> { ... }`
fn on_arrow_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChange> {
let file_text = file.syntax().text();
assert_eq!(file_text.char_at(offset), Some('>'));
let after_arrow = offset + TextUnit::of_char('>');
if file_text.char_at(after_arrow) != Some('{') {
return None;
}
if find_node_at_offset::<ast::RetType>(file.syntax(), offset).is_none() {
return None;
}
Some(SingleFileChange {
label: "add space after return type".to_string(),
edit: TextEdit::insert(after_arrow, " ".to_string()),
cursor_position: Some(after_arrow),
})
} }
#[cfg(test)] #[cfg(test)]
@ -141,239 +197,6 @@ mod tests {
use super::*; use super::*;
#[test]
fn test_on_eq_typed() {
fn type_eq(before: &str, after: &str) {
let (offset, before) = extract_offset(before);
let mut edit = TextEditBuilder::default();
edit.insert(offset, "=".to_string());
let before = edit.finish().apply(&before);
let parse = SourceFile::parse(&before);
if let Some(result) = on_eq_typed(&parse.tree(), offset) {
let actual = result.apply(&before);
assert_eq_text!(after, &actual);
} else {
assert_eq_text!(&before, after)
};
}
// do_check(r"
// fn foo() {
// let foo =<|>
// }
// ", r"
// fn foo() {
// let foo =;
// }
// ");
type_eq(
r"
fn foo() {
let foo <|> 1 + 1
}
",
r"
fn foo() {
let foo = 1 + 1;
}
",
);
// do_check(r"
// fn foo() {
// let foo =<|>
// let bar = 1;
// }
// ", r"
// fn foo() {
// let foo =;
// let bar = 1;
// }
// ");
}
fn type_dot(before: &str, after: &str) {
let (offset, before) = extract_offset(before);
let mut edit = TextEditBuilder::default();
edit.insert(offset, ".".to_string());
let before = edit.finish().apply(&before);
let (analysis, file_id) = single_file(&before);
if let Some(result) = analysis.on_dot_typed(FilePosition { offset, file_id }).unwrap() {
assert_eq!(result.source_file_edits.len(), 1);
let actual = result.source_file_edits[0].edit.apply(&before);
assert_eq_text!(after, &actual);
} else {
assert_eq_text!(&before, after)
};
}
#[test]
fn indents_new_chain_call() {
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.
}
",
);
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.
}
",
)
}
#[test]
fn indents_new_chain_call_with_semi() {
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>;
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.;
}
",
);
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>;
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.;
}
",
)
}
#[test]
fn indents_continued_chain_call() {
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
.
}
",
);
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
.
}
",
);
}
#[test]
fn indents_middle_of_chain_call() {
type_dot(
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
<|>
.nth(92)
.unwrap();
}
",
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
.
.nth(92)
.unwrap();
}
",
);
type_dot(
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
<|>
.nth(92)
.unwrap();
}
",
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
.
.nth(92)
.unwrap();
}
",
);
}
#[test]
fn dont_indent_freestanding_dot() {
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
.
}
",
);
type_dot(
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
.
}
",
);
}
#[test] #[test]
fn test_on_enter() { fn test_on_enter() {
fn apply_on_enter(before: &str) -> Option<String> { fn apply_on_enter(before: &str) -> Option<String> {
@ -426,4 +249,214 @@ impl S {
); );
do_check_noop(r"<|>//! docz"); do_check_noop(r"<|>//! docz");
} }
fn do_type_char(char_typed: char, before: &str) -> Option<(String, SingleFileChange)> {
let (offset, before) = extract_offset(before);
let edit = TextEdit::insert(offset, char_typed.to_string());
let before = edit.apply(&before);
let parse = SourceFile::parse(&before);
on_char_typed_inner(&parse.tree(), offset, char_typed)
.map(|it| (it.edit.apply(&before), it))
}
fn type_char(char_typed: char, before: &str, after: &str) {
let (actual, file_change) = do_type_char(char_typed, before)
.unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed));
if after.contains("<|>") {
let (offset, after) = extract_offset(after);
assert_eq_text!(&after, &actual);
assert_eq!(file_change.cursor_position, Some(offset))
} else {
assert_eq_text!(after, &actual);
}
}
fn type_char_noop(char_typed: char, before: &str) {
let file_change = do_type_char(char_typed, before);
assert!(file_change.is_none())
}
#[test]
fn test_on_eq_typed() {
// do_check(r"
// fn foo() {
// let foo =<|>
// }
// ", r"
// fn foo() {
// let foo =;
// }
// ");
type_char(
'=',
r"
fn foo() {
let foo <|> 1 + 1
}
",
r"
fn foo() {
let foo = 1 + 1;
}
",
);
// do_check(r"
// fn foo() {
// let foo =<|>
// let bar = 1;
// }
// ", r"
// fn foo() {
// let foo =;
// let bar = 1;
// }
// ");
}
#[test]
fn indents_new_chain_call() {
type_char(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.
}
",
);
type_char_noop(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>
}
",
)
}
#[test]
fn indents_new_chain_call_with_semi() {
type_char(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>;
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.;
}
",
);
type_char_noop(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
<|>;
}
",
)
}
#[test]
fn indents_continued_chain_call() {
type_char(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
<|>
}
",
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
.
}
",
);
type_char_noop(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
self.child_impl(db, name)
.first()
<|>
}
",
);
}
#[test]
fn indents_middle_of_chain_call() {
type_char(
'.',
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
<|>
.nth(92)
.unwrap();
}
",
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
.
.nth(92)
.unwrap();
}
",
);
type_char_noop(
'.',
r"
fn source_impl() {
let var = enum_defvariant_list().unwrap()
<|>
.nth(92)
.unwrap();
}
",
);
}
#[test]
fn dont_indent_freestanding_dot() {
type_char_noop(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
<|>
}
",
);
type_char_noop(
'.',
r"
pub fn child(&self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<Module>> {
<|>
}
",
);
}
#[test]
fn adds_space_after_return_type() {
type_char('>', "fn foo() -<|>{ 92 }", "fn foo() -><|> { 92 }")
}
} }

View file

@ -38,7 +38,7 @@ pub fn server_capabilities() -> ServerCapabilities {
document_range_formatting_provider: None, document_range_formatting_provider: None,
document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
first_trigger_character: "=".to_string(), first_trigger_character: "=".to_string(),
more_trigger_character: Some(vec![".".to_string()]), more_trigger_character: Some(vec![".".to_string(), ">".to_string()]),
}), }),
selection_range_provider: Some(GenericCapability::default()), selection_range_provider: Some(GenericCapability::default()),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),

View file

@ -1,4 +1,11 @@
//! FIXME: write short doc here //! Config used by the language server.
//!
//! We currently get this config from `initialize` LSP request, which is not the
//! best way to do it, but was the simplest thing we could implement.
//!
//! Of particular interest is the `feature_flags` hash map: while other fields
//! configure the server itself, feature flags are passed into analysis, and
//! tweak things like automatic insertion of `()` in completions.
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -72,10 +79,7 @@ mod test {
assert_eq!(default, serde_json::from_str(r#"{}"#).unwrap()); assert_eq!(default, serde_json::from_str(r#"{}"#).unwrap());
assert_eq!( assert_eq!(
default, default,
serde_json::from_str( serde_json::from_str(r#"{"publishDecorations":null, "lruCapacity":null}"#).unwrap()
r#"{"publishDecorations":null, "showWorkspaceLoaded":null, "lruCapacity":null}"#
)
.unwrap()
); );
} }
} }

Some files were not shown because too many files have changed in this diff Show more