Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-03 15:15:24 +00:00
Rollup merge of #132029 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer`

r? `@ghost`

Commit 81d6960f76
63 changed files with 7126 additions and 2938 deletions
@@ -13,5 +13,5 @@ max_line_length = 100
 [*.md]
 indent_size = 2
 
-[*.{yml, yaml}]
+[*.{yml,yaml}]
 indent_size = 2
.github/workflows/ci.yaml (vendored, 6 changes)
@@ -104,11 +104,11 @@ jobs:
     if: matrix.os == 'ubuntu-latest'
     run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats .
 
-  - name: Run analysis-stats on rust std library
+  - name: Run analysis-stats on the rust standard libraries
     if: matrix.os == 'ubuntu-latest'
     env:
       RUSTC_BOOTSTRAP: 1
-    run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+    run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/
 
   - name: clippy
     if: matrix.os == 'windows-latest'
Cargo.lock (generated, 86 changes)
@@ -73,7 +73,7 @@ dependencies = [
 "intern",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "lz4_flex",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "salsa",
 "semver",
 "span",
@@ -161,7 +161,7 @@ dependencies = [
 "expect-test",
 "intern",
 "oorandom",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "syntax",
 "syntax-bridge",
 "tt",
@@ -216,7 +216,7 @@ dependencies = [
 "chalk-derive",
 "chalk-ir",
 "chalk-solve",
-"rustc-hash",
+"rustc-hash 1.1.0",
 "tracing",
 ]
 
@@ -232,7 +232,7 @@ dependencies = [
 "indexmap",
 "itertools",
 "petgraph",
-"rustc-hash",
+"rustc-hash 1.1.0",
 "tracing",
 ]
 
@@ -513,7 +513,7 @@ dependencies = [
 "hir-ty",
 "intern",
 "itertools",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "smallvec",
 "span",
 "stdx",
@@ -547,7 +547,7 @@ dependencies = [
 "mbe",
 "ra-ap-rustc_abi",
 "ra-ap-rustc_parse_format",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "rustc_apfloat",
 "smallvec",
 "span",
@@ -577,7 +577,7 @@ dependencies = [
 "limit",
 "mbe",
 "parser",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "smallvec",
 "span",
 "stdx",
@@ -616,7 +616,7 @@ dependencies = [
 "ra-ap-rustc_abi",
 "ra-ap-rustc_index",
 "ra-ap-rustc_pattern_analysis",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "rustc_apfloat",
 "scoped-tls",
 "smallvec",
@@ -731,13 +731,13 @@ dependencies = [
 "indexmap",
 "itertools",
 "limit",
-"line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "memchr",
 "nohash-hasher",
 "parser",
 "profile",
 "rayon",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "span",
 "stdx",
 "syntax",
@@ -834,7 +834,7 @@ version = "0.0.0"
 dependencies = [
 "dashmap",
 "hashbrown",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "sptr",
 "triomphe",
 ]
@@ -939,7 +939,7 @@ version = "0.0.0"
 
 [[package]]
 name = "line-index"
-version = "0.1.1"
+version = "0.1.2"
 dependencies = [
 "nohash-hasher",
 "oorandom",
@@ -948,9 +948,9 @@ dependencies = [
 
 [[package]]
 name = "line-index"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67d61795376ae2683928c218fda7d7d7db136fd38c06b7552904667f0d55580a"
+checksum = "3e27e0ed5a392a7f5ba0b3808a2afccff16c64933312c84b57618b49d1209bd2"
 dependencies = [
 "nohash-hasher",
 "text-size",
@@ -1051,7 +1051,7 @@ dependencies = [
 "intern",
 "parser",
 "ra-ap-rustc_lexer",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "smallvec",
 "span",
 "stdx",
@@ -1345,7 +1345,7 @@ dependencies = [
 "indexmap",
 "intern",
 "paths",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "serde",
 "serde_json",
 "span",
@@ -1435,7 +1435,7 @@ dependencies = [
 "itertools",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "paths",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "semver",
 "serde",
 "serde_json",
@@ -1497,9 +1497,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c6999d098000b98415939f13158dac78cb3eeeb7b0c073847f3e4b623866e27c"
+checksum = "879ece0781e3c1cb670b9f29775c81a43a16db789d1296fad6bc5c74065b2fac"
 dependencies = [
 "bitflags 2.6.0",
 "ra-ap-rustc_index",
@@ -1508,9 +1508,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae9fb312d942817dab10790881f555928c1f6a11a85186e8e573ad4a86c7d3be"
+checksum = "6910087ff89bb9f3db114bfcd86b5139042731fe7278d3ff4ceaa69a140154a7"
 dependencies = [
 "arrayvec",
 "ra-ap-rustc_index_macros",
@@ -1519,9 +1519,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "766e3990eb1066a06deefc561b5a01b32ca5c9211feea31cbf4ed50611519872"
+checksum = "3b6f7bd12b678fbb37444ba77f3b0cfc13b7394a6dc7b0c799491fc9df0a9997"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -1530,9 +1530,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4afa98eb7889c137d5a3f1cd189089e16da04d1e4837d358a67aa3dab10ffbe"
+checksum = "119bc05b5b6bc3e7f5b67ce8b8080e185da94bd83c447f91b6b3f3ecf60cbab1"
 dependencies = [
 "unicode-properties",
 "unicode-xid",
@@ -1540,9 +1540,9 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9234c96ffb0565286790407fb7eb7f55ebf69267de4db382fdec0a17f14b0e2"
+checksum = "70ed6150ae71d905c064dc88d7824ebb0fa81083f45d7477cba7b57176f2f635"
 dependencies = [
 "ra-ap-rustc_index",
 "ra-ap-rustc_lexer",
@@ -1550,12 +1550,12 @@ dependencies = [
 
 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.71.0"
+version = "0.73.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "273d5f72926a58c7eea27aebc898d1d5b32d23d2342f692a94a2cf8746aa4a2f"
+checksum = "6e830862a0ec85fce211d34735315686bb8d6a12d418d6d735fb534aa1cd3293"
 dependencies = [
 "ra-ap-rustc_index",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "rustc_apfloat",
 "smallvec",
 "tracing",
@@ -1640,7 +1640,7 @@ dependencies = [
 "countme",
 "hashbrown",
 "memoffset",
-"rustc-hash",
+"rustc-hash 1.1.0",
 "text-size",
 ]
 
@@ -1680,7 +1680,7 @@ dependencies = [
 "profile",
 "project-model",
 "rayon",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "scip",
 "semver",
 "serde",
@@ -1717,6 +1717,12 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
+[[package]]
+name = "rustc-hash"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+
 [[package]]
 name = "rustc_apfloat"
 version = "0.2.1+llvm-462a31f5a5ab"
@@ -1746,7 +1752,7 @@ dependencies = [
 "oorandom",
 "parking_lot",
 "rand",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "salsa-macros",
 "smallvec",
 "tracing",
@@ -1898,7 +1904,7 @@ version = "0.0.0"
 dependencies = [
 "hashbrown",
 "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "salsa",
 "stdx",
 "syntax",
@@ -1967,7 +1973,7 @@ dependencies = [
 "ra-ap-rustc_lexer",
 "rayon",
 "rowan",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "rustc_apfloat",
 "smol_str",
 "stdx",
@@ -1983,7 +1989,7 @@ version = "0.0.0"
 dependencies = [
 "intern",
 "parser",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "span",
 "stdx",
 "syntax",
@@ -2000,7 +2006,7 @@ dependencies = [
 "cfg",
 "hir-expand",
 "intern",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "span",
 "stdx",
 "test-utils",
@@ -2014,7 +2020,7 @@ dependencies = [
 "dissimilar",
 "paths",
 "profile",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "stdx",
 "text-size",
 "tracing",
@@ -2361,7 +2367,7 @@ dependencies = [
 "indexmap",
 "nohash-hasher",
 "paths",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "stdx",
 "tracing",
 ]
@@ -2374,7 +2380,7 @@ dependencies = [
 "notify",
 "paths",
 "rayon",
-"rustc-hash",
+"rustc-hash 2.0.0",
 "stdx",
 "tracing",
 "vfs",
Cargo.toml (14 changes)
@@ -85,18 +85,18 @@ tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 
-ra-ap-rustc_lexer = { version = "0.71.0", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.71.0", default-features = false }
-ra-ap-rustc_index = { version = "0.71.0", default-features = false }
-ra-ap-rustc_abi = { version = "0.71.0", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.71.0", default-features = false }
+ra-ap-rustc_lexer = { version = "0.73", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.73", default-features = false }
+ra-ap-rustc_index = { version = "0.73", default-features = false }
+ra-ap-rustc_abi = { version = "0.73", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.73", default-features = false }
 
 # local crates that aren't published to crates.io. These should not have versions.
 test-fixture = { path = "./crates/test-fixture" }
 test-utils = { path = "./crates/test-utils" }
 
 # In-tree crates that are published separately and follow semver. See lib/README.md
-line-index = { version = "0.1.1" }
+line-index = { version = "0.1.2" }
 la-arena = { version = "0.3.1" }
 lsp-server = { version = "0.7.6" }
 
@@ -136,7 +136,7 @@ process-wrap = { version = "8.0.2", features = ["std"] }
 pulldown-cmark-to-cmark = "10.0.4"
 pulldown-cmark = { version = "0.9.0", default-features = false }
 rayon = "1.8.0"
-rustc-hash = "1.1.0"
+rustc-hash = "2.0.0"
 semver = "1.0.14"
 serde = { version = "1.0.192", features = ["derive"] }
 serde_json = "1.0.108"
@@ -148,6 +148,10 @@ impl FunctionData {
         self.flags.contains(FnFlags::HAS_UNSAFE_KW)
     }
 
+    pub fn is_safe(&self) -> bool {
+        self.flags.contains(FnFlags::HAS_SAFE_KW)
+    }
+
     pub fn is_varargs(&self) -> bool {
         self.flags.contains(FnFlags::IS_VARARGS)
     }
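For context, a minimal Rust sketch (not part of this diff) of the Rust 2024 `unsafe extern` syntax that the new `HAS_SAFE_KW` flag and the `safe_token()` lowering below model; the declared items are illustrative only and would need real symbols to link:

    // In Rust 2024, `extern` blocks are written `unsafe extern`, and each item
    // inside may carry a `safe` or `unsafe` qualifier.
    unsafe extern "C" {
        // Declared `safe`: callable without an `unsafe` block.
        safe fn c_abs(x: i32) -> i32;
        // No `safe` qualifier: calling it still requires `unsafe`.
        unsafe fn c_write(fd: i32, buf: *const u8, len: usize) -> isize;
        // `safe` statics can likewise be read without `unsafe`.
        safe static C_VERSION: i32;
    }

    fn demo() -> i32 {
        let v = C_VERSION; // ok: `safe` extern static
        c_abs(v)           // ok: `safe` extern fn
    }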
@@ -567,6 +571,8 @@ pub struct StaticData {
     pub visibility: RawVisibility,
     pub mutable: bool,
     pub is_extern: bool,
+    pub has_safe_kw: bool,
+    pub has_unsafe_kw: bool,
 }
 
 impl StaticData {
@@ -581,6 +587,8 @@ impl StaticData {
             visibility: item_tree[statik.visibility].clone(),
             mutable: statik.mutable,
             is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+            has_safe_kw: statik.has_safe_kw,
+            has_unsafe_kw: statik.has_unsafe_kw,
         })
     }
 }
@@ -754,6 +754,7 @@ bitflags::bitflags! {
         const HAS_ASYNC_KW = 1 << 4;
         const HAS_UNSAFE_KW = 1 << 5;
         const IS_VARARGS = 1 << 6;
+        const HAS_SAFE_KW = 1 << 7;
     }
 }
 
@@ -822,7 +823,10 @@ pub struct Const {
 pub struct Static {
     pub name: Name,
     pub visibility: RawVisibilityId,
+    // TODO: use bitflags when we have more flags
     pub mutable: bool,
+    pub has_safe_kw: bool,
+    pub has_unsafe_kw: bool,
     pub type_ref: Interned<TypeRef>,
     pub ast_id: FileAstId<ast::Static>,
 }
@@ -440,6 +440,9 @@ impl<'a> Ctx<'a> {
         if func.unsafe_token().is_some() {
             flags |= FnFlags::HAS_UNSAFE_KW;
         }
+        if func.safe_token().is_some() {
+            flags |= FnFlags::HAS_SAFE_KW;
+        }
         if has_var_args {
             flags |= FnFlags::IS_VARARGS;
         }
@@ -484,8 +487,11 @@ impl<'a> Ctx<'a> {
         let type_ref = self.lower_type_ref_opt(static_.ty());
         let visibility = self.lower_visibility(static_);
         let mutable = static_.mut_token().is_some();
+        let has_safe_kw = static_.safe_token().is_some();
+        let has_unsafe_kw = static_.unsafe_token().is_some();
         let ast_id = self.source_ast_id_map.ast_id(static_);
-        let res = Static { name, visibility, mutable, type_ref, ast_id };
+        let res =
+            Static { name, visibility, mutable, type_ref, ast_id, has_safe_kw, has_unsafe_kw };
         Some(id(self.data().statics.alloc(res)))
     }
 
@@ -278,6 +278,9 @@ impl Printer<'_> {
         if flags.contains(FnFlags::HAS_UNSAFE_KW) {
             w!(self, "unsafe ");
         }
+        if flags.contains(FnFlags::HAS_SAFE_KW) {
+            w!(self, "safe ");
+        }
         if let Some(abi) = abi {
             w!(self, "extern \"{}\" ", abi);
         }
@@ -379,9 +382,23 @@ impl Printer<'_> {
                 wln!(self, " = _;");
             }
             ModItem::Static(it) => {
-                let Static { name, visibility, mutable, type_ref, ast_id } = &self.tree[it];
+                let Static {
+                    name,
+                    visibility,
+                    mutable,
+                    type_ref,
+                    ast_id,
+                    has_safe_kw,
+                    has_unsafe_kw,
+                } = &self.tree[it];
                 self.print_ast_id(ast_id.erase());
                 self.print_visibility(*visibility);
+                if *has_safe_kw {
+                    w!(self, "safe ");
+                }
+                if *has_unsafe_kw {
+                    w!(self, "unsafe ");
+                }
                 w!(self, "static ");
                 if *mutable {
                     w!(self, "mut ");
@@ -139,13 +139,11 @@ impl Visibility {
         let def_map_block = def_map.block_id();
         loop {
             match (to_module.block, def_map_block) {
-                // to_module is not a block, so there is no parent def map to use
+                // `to_module` is not a block, so there is no parent def map to use.
                 (None, _) => (),
+                // `to_module` is at `def_map`'s block, no need to move further.
                 (Some(a), Some(b)) if a == b => {
                     cov_mark::hit!(is_visible_from_same_block_def_map);
-                    if let Some(parent) = def_map.parent() {
-                        to_module = parent;
-                    }
                 }
                 _ => {
                     if let Some(parent) = to_module.def_map(db).parent() {
@@ -5,19 +5,20 @@ use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Symbol};
 use mbe::{expect_fragment, DelimiterKind};
-use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
+use span::{Edition, EditionedFileId, Span};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
     unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
 };
-use syntax_bridge::parse_to_token_tree;
+use syntax_bridge::syntax_node_to_token_tree;
 
 use crate::{
     builtin::quote::{dollar_crate, quote},
     db::ExpandDatabase,
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt},
     name,
+    span_map::SpanMap,
     tt::{self, DelimSpan},
     ExpandError, ExpandResult, HirFileIdExt, Lookup as _, MacroCallId,
 };
@@ -739,18 +740,14 @@ fn include_expand(
             return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
         }
     };
-    match parse_to_token_tree(
-        file_id.edition(),
-        SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-        SyntaxContextId::ROOT,
-        &db.file_text(file_id.file_id()),
-    ) {
-        Some(it) => ExpandResult::ok(it),
-        None => ExpandResult::new(
-            tt::Subtree::empty(DelimSpan { open: span, close: span }),
-            ExpandError::other(span, "failed to parse included file"),
-        ),
-    }
+    let span_map = db.real_span_map(file_id);
+    // FIXME: Parse errors
+    ExpandResult::ok(syntax_node_to_token_tree(
+        &db.parse(file_id).syntax_node(),
+        SpanMap::RealSpanMap(span_map),
+        span,
+        syntax_bridge::DocCommentDesugarMode::ProcMacro,
+    ))
 }
 
 pub fn include_input_to_file_id(
@@ -35,7 +35,7 @@ type MacroArgResult = (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);
 /// an error will be emitted.
 ///
 /// Actual max for `analysis-stats .` at some point: 30672.
-static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
+static TOKEN_LIMIT: Limit = Limit::new(2_097_152);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
@@ -95,7 +95,8 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
 fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
     let mut err = String::new();
     let span_formatter = |file, range| format!("{file:?} {range:?}");
-    let edition = db.crate_graph()[db.test_crate()].edition;
+    let edition =
+        db.crate_graph()[*db.crate_graph().crates_in_topological_order().last().unwrap()].edition;
     match e {
         ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
         ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
@@ -2896,7 +2897,7 @@ fn recursive_adt() {
             {
                 const VARIANT_TAG_TREE: TagTree = TagTree::Choice(
                     &[
-                        TagTree::Leaf,
+                        TAG_TREE,
                     ],
                 );
                 VARIANT_TAG_TREE
@@ -2905,6 +2906,6 @@ fn recursive_adt() {
             TAG_TREE
         };
     "#,
-        |e| matches!(e, ConstEvalError::MirEvalError(MirEvalError::StackOverflow)),
+        |e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::Loop)),
     );
 }
@@ -89,7 +89,7 @@ fn walk_unsafe(
             let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
             if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
                 let static_data = db.static_data(id);
-                if static_data.mutable || static_data.is_extern {
+                if static_data.mutable || (static_data.is_extern && !static_data.has_safe_kw) {
                     unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
                 }
             }
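A hedged illustration of how the updated check classifies static accesses (hypothetical items, mirroring the condition `static_data.mutable || (static_data.is_extern && !static_data.has_safe_kw)` above):

    static mut GLOBAL_COUNTER: u32 = 0;

    unsafe extern "C" {
        static C_ERRNO: i32;          // extern without `safe`: access is unsafe
        safe static C_PAGE_SIZE: i32; // extern with `safe`: access is safe
    }

    fn reads() {
        // let a = GLOBAL_COUNTER;    // flagged: mutable static needs `unsafe`
        // let b = C_ERRNO;           // flagged: extern static without `safe`
        let c = C_PAGE_SIZE;          // not flagged: `safe` extern static
        let d = unsafe { C_ERRNO };   // fine inside an `unsafe` block
        let _ = (c, d);
    }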
@@ -917,9 +917,19 @@ impl<'a> InferenceTable<'a> {
     /// Check if given type is `Sized` or not
     pub(crate) fn is_sized(&mut self, ty: &Ty) -> bool {
         // Early return for some obvious types
-        if matches!(ty.kind(Interner), TyKind::Scalar(..) | TyKind::Ref(..) | TyKind::Raw(..)) {
+        if matches!(
+            ty.kind(Interner),
+            TyKind::Scalar(..)
+                | TyKind::Ref(..)
+                | TyKind::Raw(..)
+                | TyKind::Never
+                | TyKind::FnDef(..)
+                | TyKind::Array(..)
+                | TyKind::Function(_)
+        ) {
             return true;
         }
 
         if let Some((AdtId::StructId(id), subst)) = ty.as_adt() {
             let struct_data = self.db.struct_data(id);
             if let Some((last_field, _)) = struct_data.variant_data.fields().iter().last() {
@@ -72,18 +72,15 @@ pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx
 
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum LayoutError {
-    // FIXME: Remove variants that duplicate LayoutCalculatorError's variants after sync
+    // FIXME: Remove more variants once they get added to LayoutCalculatorError
     BadCalc(LayoutCalculatorError<()>),
-    EmptyUnion,
     HasErrorConst,
     HasErrorType,
     HasPlaceholder,
     InvalidSimdType,
     NotImplemented,
     RecursiveTypeWithoutIndirection,
-    SizeOverflow,
     TargetLayoutNotAvailable,
-    UnexpectedUnsized,
     Unknown,
     UserReprTooSmall,
 }
@@ -93,7 +90,6 @@ impl fmt::Display for LayoutError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             LayoutError::BadCalc(err) => err.fallback_fmt(f),
-            LayoutError::EmptyUnion => write!(f, "type is an union with no fields"),
             LayoutError::HasErrorConst => write!(f, "type contains an unevaluatable const"),
             LayoutError::HasErrorType => write!(f, "type contains an error"),
             LayoutError::HasPlaceholder => write!(f, "type contains placeholders"),
@@ -102,11 +98,7 @@ impl fmt::Display for LayoutError {
             LayoutError::RecursiveTypeWithoutIndirection => {
                 write!(f, "recursive type without indirection")
             }
-            LayoutError::SizeOverflow => write!(f, "size overflow"),
             LayoutError::TargetLayoutNotAvailable => write!(f, "target layout not available"),
-            LayoutError::UnexpectedUnsized => {
-                write!(f, "an unsized type was found where a sized type was expected")
-            }
             LayoutError::Unknown => write!(f, "unknown"),
             LayoutError::UserReprTooSmall => {
                 write!(f, "the `#[repr]` hint is too small to hold the discriminants of the enum")
@@ -181,7 +173,10 @@ fn layout_of_simd_ty(
     };
 
     // Compute the size and alignment of the vector:
-    let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow)?;
+    let size = e_ly
+        .size
+        .checked_mul(e_len, dl)
+        .ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;
     let align = dl.vector_align(size);
     let size = size.align_to(align.abi);
 
@@ -294,7 +289,10 @@ pub fn layout_of_ty_query(
         TyKind::Array(element, count) => {
             let count = try_const_usize(db, count).ok_or(LayoutError::HasErrorConst)? as u64;
             let element = db.layout_of_ty(element.clone(), trait_env)?;
-            let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
+            let size = element
+                .size
+                .checked_mul(count, dl)
+                .ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;
 
             let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
                 Abi::Uninhabited
@@ -51,10 +51,7 @@ mod test_db;
 #[cfg(test)]
 mod tests;
 
-use std::{
-    collections::hash_map::Entry,
-    hash::{BuildHasherDefault, Hash},
-};
+use std::hash::Hash;
 
 use base_db::ra_salsa::InternValueTrivial;
 use chalk_ir::{
@@ -65,10 +62,11 @@ use chalk_ir::{
 use either::Either;
 use hir_def::{hir::ExprId, type_ref::Rawness, CallableDefId, GeneralConstId, TypeOrConstParamId};
 use hir_expand::name::Name;
+use indexmap::{map::Entry, IndexMap};
 use intern::{sym, Symbol};
 use la_arena::{Arena, Idx};
 use mir::{MirEvalError, VTableMap};
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
 use span::Edition;
 use syntax::ast::{make, ConstArg};
 use traits::FnTrait;
@@ -199,7 +197,7 @@ pub enum MemoryMap {
 
 #[derive(Debug, Default, Clone, PartialEq, Eq)]
 pub struct ComplexMemoryMap {
-    memory: FxHashMap<usize, Box<[u8]>>,
+    memory: IndexMap<usize, Box<[u8]>, FxBuildHasher>,
     vtable: VTableMap,
 }
 
@@ -245,7 +243,7 @@ impl MemoryMap {
         match self {
             MemoryMap::Empty => Ok(Default::default()),
             MemoryMap::Simple(m) => transform((&0, m)).map(|(addr, val)| {
-                let mut map = FxHashMap::with_capacity_and_hasher(1, BuildHasherDefault::default());
+                let mut map = FxHashMap::with_capacity_and_hasher(1, rustc_hash::FxBuildHasher);
                 map.insert(addr, val);
                 map
             }),
@@ -257,10 +257,12 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
         return true;
     }
 
-    match func.lookup(db.upcast()).container {
+    let loc = func.lookup(db.upcast());
+    match loc.container {
         hir_def::ItemContainerId::ExternBlockId(block) => {
-            // Function in an `extern` block are always unsafe to call, except when it has
-            // `"rust-intrinsic"` ABI there are a few exceptions.
+            // Function in an `extern` block are always unsafe to call, except when
+            // it is marked as `safe` or it has `"rust-intrinsic"` ABI there are a
+            // few exceptions.
             let id = block.lookup(db.upcast()).id;
 
             let is_intrinsic =
@@ -270,8 +272,8 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
                 // Intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
                 !data.attrs.by_key(&sym::rustc_safe_intrinsic).exists()
             } else {
-                // Extern items are always unsafe
-                true
+                // Extern items without `safe` modifier are always unsafe
+                !data.is_safe()
             }
         }
         _ => false,
@@ -892,29 +892,8 @@ impl<'db> SemanticsImpl<'db> {
         f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
-        let (sa, span, file_id) = token
-            .parent()
-            .and_then(|parent| {
-                self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
-            })
-            .and_then(|sa| {
-                let file_id = sa.file_id.file_id()?;
-                Some((
-                    sa,
-                    self.db.real_span_map(file_id).span_for_range(token.text_range()),
-                    HirFileId::from(file_id),
-                ))
-            })?;
 
-        let mut m_cache = self.macro_call_cache.borrow_mut();
-        let def_map = sa.resolver.def_map();
-
-        // A stack of tokens to process, along with the file they came from
-        // These are tracked to know which macro calls we still have to look into
-        // the tokens themselves aren't that interesting as the span that is being used to map
-        // things down never changes.
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
-            vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
+        let span = self.db.real_span_map(file_id).span_for_range(token.text_range());
 
         // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
         let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -926,7 +905,6 @@ impl<'db> SemanticsImpl<'db> {
                 .map(SmallVec::<[_; 2]>::from_iter),
             )
         })?;
 
         // we have found a mapping for the token if the vec is non-empty
         let res = mapped_tokens.is_empty().not().then_some(());
         // requeue the tokens we got from mapping our current token down
@@ -934,6 +912,33 @@ impl<'db> SemanticsImpl<'db> {
             res
         };
 
+        // A stack of tokens to process, along with the file they came from
+        // These are tracked to know which macro calls we still have to look into
+        // the tokens themselves aren't that interesting as the span that is being used to map
+        // things down never changes.
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
+        let include = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id);
+        match include {
+            Some(include) => {
+                // include! inputs are always from real files, so they only need to be handled once upfront
+                process_expansion_for_token(&mut stack, include)?;
+            }
+            None => {
+                stack.push((file_id.into(), smallvec![(token, SyntaxContextId::ROOT)]));
+            }
+        }
+
+        let (file_id, tokens) = stack.first()?;
+        // make sure we pick the token in the expanded include if we encountered an include,
+        // otherwise we'll get the wrong semantics
+        let sa =
+            tokens.first()?.0.parent().and_then(|parent| {
+                self.analyze_impl(InFile::new(*file_id, &parent), None, false)
+            })?;
+
+        let mut m_cache = self.macro_call_cache.borrow_mut();
+        let def_map = sa.resolver.def_map();
+
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
         let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
@@ -1011,6 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
             ) {
                 call.as_macro_file()
             } else {
+                // FIXME: This is wrong, the SourceAnalyzer might be invalid here
                 sa.expand(self.db, mcall.as_ref())?
             };
             m_cache.insert(mcall, it);
@@ -104,7 +104,7 @@ use hir_expand::{
 };
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{FileId, MacroFileId};
+use span::{EditionedFileId, FileId, MacroFileId};
 use stdx::impl_from;
 use syntax::{
     ast::{self, HasName},
@@ -118,9 +118,27 @@ pub(super) struct SourceToDefCache {
     pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
     expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
     pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
+    pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
 }
 
 impl SourceToDefCache {
+    pub(super) fn get_or_insert_include_for(
+        &mut self,
+        db: &dyn HirDatabase,
+        file: EditionedFileId,
+    ) -> Option<MacroFileId> {
+        if let Some(&m) = self.included_file_cache.get(&file) {
+            return m;
+        }
+        self.included_file_cache.insert(file, None);
+        for &crate_id in db.relevant_crates(file.into()).iter() {
+            db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
+                self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
+            });
+        }
+        self.included_file_cache.get(&file).copied().flatten()
+    }
+
     pub(super) fn get_or_insert_expansion(
         &mut self,
         sema: &SemanticsImpl<'_>,
@@ -163,9 +181,13 @@ impl SourceToDefCtx<'_, '_> {
             .include_macro_invoc(crate_id)
             .iter()
             .filter(|&&(_, file_id)| file_id == file)
-            .flat_map(|(call, _)| {
+            .flat_map(|&(macro_call_id, file_id)| {
+                self.cache
+                    .included_file_cache
+                    .insert(file_id, Some(MacroFileId { macro_call_id }));
                 modules(
-                    call.lookup(self.db.upcast())
+                    macro_call_id
+                        .lookup(self.db.upcast())
                         .kind
                         .file_id()
                         .original_file(self.db.upcast())
@@ -1095,6 +1095,7 @@ fn main() {
 
 #[test]
 fn field_enum_cross_file() {
+    // FIXME: The import is missing
     check_assist(
         bool_to_enum,
         r#"
@@ -1132,7 +1133,7 @@ fn foo() {
 }
 
 //- /main.rs
-use foo::{Bool, Foo};
+use foo::Foo;
 
 mod foo;
 
(file diff suppressed because it is too large)
crates/ide-assists/src/handlers/unwrap_return_type.rs (2229 changes): file diff suppressed because it is too large
crates/ide-assists/src/handlers/wrap_return_type.rs (2457 changes): file diff suppressed because it is too large
(file diff suppressed because it is too large)
@@ -223,9 +223,9 @@ mod handlers {
     mod unnecessary_async;
     mod unqualify_method_call;
     mod unwrap_block;
-    mod unwrap_result_return_type;
+    mod unwrap_return_type;
     mod unwrap_tuple;
-    mod wrap_return_type_in_result;
+    mod wrap_return_type;
     mod wrap_unwrap_cfg_attr;
 
     pub(crate) fn all() -> &'static [Handler] {
@@ -355,10 +355,10 @@ mod handlers {
             unmerge_use::unmerge_use,
             unnecessary_async::unnecessary_async,
             unwrap_block::unwrap_block,
-            unwrap_result_return_type::unwrap_result_return_type,
+            unwrap_return_type::unwrap_return_type,
             unwrap_tuple::unwrap_tuple,
             unqualify_method_call::unqualify_method_call,
-            wrap_return_type_in_result::wrap_return_type_in_result,
+            wrap_return_type::wrap_return_type,
             wrap_unwrap_cfg_attr::wrap_unwrap_cfg_attr,
 
             // These are manually sorted for better priorities. By default,
@@ -3264,6 +3264,20 @@ fn foo() {
     )
 }
 
+#[test]
+fn doctest_unwrap_option_return_type() {
+    check_doc_test(
+        "unwrap_option_return_type",
+        r#####"
+//- minicore: option
+fn foo() -> Option<i32>$0 { Some(42i32) }
+"#####,
+        r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+    )
+}
+
 #[test]
 fn doctest_unwrap_result_return_type() {
     check_doc_test(
@@ -3297,6 +3311,20 @@ fn main() {
     )
 }
 
+#[test]
+fn doctest_wrap_return_type_in_option() {
+    check_doc_test(
+        "wrap_return_type_in_option",
+        r#####"
+//- minicore: option
+fn foo() -> i32$0 { 42i32 }
+"#####,
+        r#####"
+fn foo() -> Option<i32> { Some(42i32) }
+"#####,
+    )
+}
+
 #[test]
 fn doctest_wrap_return_type_in_result() {
     check_doc_test(
@@ -29,7 +29,9 @@ pub(crate) fn complete_item_list(
     kind: &ItemListKind,
 ) {
     let _p = tracing::info_span!("complete_item_list").entered();
-    if path_ctx.is_trivial_path() {
+    // We handle completions for trait-impls in [`item_list::trait_impl`]
+    if path_ctx.is_trivial_path() && !matches!(kind, ItemListKind::TraitImpl(_)) {
         add_keywords(acc, ctx, Some(kind));
     }
 
@@ -75,73 +77,95 @@ fn add_keywords(acc: &mut Completions, ctx: &CompletionContext<'_>, kind: Option
 
     let in_item_list = matches!(kind, Some(ItemListKind::SourceFile | ItemListKind::Module) | None);
     let in_assoc_non_trait_impl = matches!(kind, Some(ItemListKind::Impl | ItemListKind::Trait));
-    let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock));
+    let in_extern_block = matches!(kind, Some(ItemListKind::ExternBlock { .. }));
+    let in_unsafe_extern_block =
+        matches!(kind, Some(ItemListKind::ExternBlock { is_unsafe: true }));
 
     let in_trait = matches!(kind, Some(ItemListKind::Trait));
-    let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
     let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
-    let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
     let in_block = kind.is_none();
 
-    let missing_qualifiers = [
-        ctx.qualifier_ctx.unsafe_tok.is_none().then_some(("unsafe", "unsafe $0")),
-        ctx.qualifier_ctx.async_tok.is_none().then_some(("async", "async $0")),
-    ];
+    let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+    let has_unsafe_kw = ctx.qualifier_ctx.unsafe_tok.is_some();
+    let has_async_kw = ctx.qualifier_ctx.async_tok.is_some();
+    let has_safe_kw = ctx.qualifier_ctx.safe_tok.is_some();
 
-    if !in_trait_impl {
-        // handle qualifier tokens
-        if missing_qualifiers.iter().any(Option::is_none) {
-            // only complete missing qualifiers
-            missing_qualifiers.iter().filter_map(|x| *x).for_each(|(kw, snippet)| {
-                add_keyword(kw, snippet);
-            });
+    // Some keywords are invalid after non-vis qualifiers, so we handle them first.
+    if (has_unsafe_kw || has_safe_kw) && in_extern_block {
+        add_keyword("fn", "fn $1($2);");
+        add_keyword("static", "static $1: $2;");
+        return;
+    }
 
-            if in_item_list || in_assoc_non_trait_impl {
-                add_keyword("fn", "fn $1($2) {\n    $0\n}");
-            }
-            if ctx.qualifier_ctx.unsafe_tok.is_some() && in_item_list {
-                add_keyword("trait", "trait $1 {\n    $0\n}");
-                if no_vis_qualifiers {
-                    add_keyword("impl", "impl $1 {\n    $0\n}");
-                }
-            }
-
-            return;
+    if has_unsafe_kw || has_async_kw {
+        if !has_unsafe_kw {
+            add_keyword("unsafe", "unsafe $0");
+        }
+        if !has_async_kw {
+            add_keyword("async", "async $0");
         }
 
-        if in_item_list {
-            add_keyword("enum", "enum $1 {\n    $0\n}");
-            add_keyword("mod", "mod $0");
-            add_keyword("static", "static $0");
-            add_keyword("struct", "struct $0");
+        if in_item_list || in_assoc_non_trait_impl {
+            add_keyword("fn", "fn $1($2) {\n    $0\n}");
+        }
+
+        if has_unsafe_kw && in_item_list {
             add_keyword("trait", "trait $1 {\n    $0\n}");
-            add_keyword("union", "union $1 {\n    $0\n}");
-            add_keyword("use", "use $0");
             if no_vis_qualifiers {
                 add_keyword("impl", "impl $1 {\n    $0\n}");
             }
         }
 
-        if !in_trait && !in_block && no_vis_qualifiers {
-            add_keyword("pub(crate)", "pub(crate) $0");
-            add_keyword("pub(super)", "pub(super) $0");
-            add_keyword("pub", "pub $0");
+        if !has_async_kw && no_vis_qualifiers && in_item_list {
+            add_keyword("extern", "extern $0");
         }
 
-        if in_extern_block {
-            add_keyword("fn", "fn $1($2);");
-        } else {
-            if !in_inherent_impl {
-                if !in_trait {
-                    add_keyword("extern", "extern $0");
-                }
-                add_keyword("type", "type $0");
-            }
-
-            add_keyword("fn", "fn $1($2) {\n    $0\n}");
-            add_keyword("unsafe", "unsafe $0");
-            add_keyword("const", "const $0");
-            add_keyword("async", "async $0");
+        return;
+    }
+
+    // ...and the rest deals with cases without any non-vis qualifiers.
+
+    // Visibility qualifiers
+    if !in_trait && !in_block && no_vis_qualifiers {
+        add_keyword("pub(crate)", "pub(crate) $0");
+        add_keyword("pub(super)", "pub(super) $0");
+        add_keyword("pub", "pub $0");
+    }
+
+    // Keywords that are valid in `item_list`
+    if in_item_list {
+        add_keyword("enum", "enum $1 {\n    $0\n}");
+        add_keyword("mod", "mod $0");
+        add_keyword("static", "static $0");
+        add_keyword("struct", "struct $0");
+        add_keyword("trait", "trait $1 {\n    $0\n}");
+        add_keyword("union", "union $1 {\n    $0\n}");
+        add_keyword("use", "use $0");
+        if no_vis_qualifiers {
+            add_keyword("impl", "impl $1 {\n    $0\n}");
         }
     }
 
+    if in_extern_block {
+        add_keyword("unsafe", "unsafe $0");
+        if in_unsafe_extern_block {
+            add_keyword("safe", "safe $0");
+        }
+
+        add_keyword("fn", "fn $1($2);");
+        add_keyword("static", "static $1: $2;");
+    } else {
+        if !in_inherent_impl {
+            if !in_trait {
+                add_keyword("extern", "extern $0");
+            }
+            add_keyword("type", "type $0");
+        }
+
+        add_keyword("fn", "fn $1($2) {\n    $0\n}");
+        add_keyword("unsafe", "unsafe $0");
+        add_keyword("const", "const $0");
+        add_keyword("async", "async $0");
+    }
 }
@@ -58,6 +58,7 @@ mod tests {
         r"fn my_fn() { unsafe $0 }",
         expect![[r#"
             kw async
+            kw extern
             kw fn
             kw impl
             kw trait
@@ -48,12 +48,16 @@ pub(crate) struct QualifierCtx {
     // TODO: Add try_tok and default_tok
     pub(crate) async_tok: Option<SyntaxToken>,
     pub(crate) unsafe_tok: Option<SyntaxToken>,
+    pub(crate) safe_tok: Option<SyntaxToken>,
     pub(crate) vis_node: Option<ast::Visibility>,
 }
 
 impl QualifierCtx {
     pub(crate) fn none(&self) -> bool {
-        self.async_tok.is_none() && self.unsafe_tok.is_none() && self.vis_node.is_none()
+        self.async_tok.is_none()
+            && self.unsafe_tok.is_none()
+            && self.safe_tok.is_none()
+            && self.vis_node.is_none()
     }
 }
 
@@ -229,7 +233,7 @@ pub(crate) enum ItemListKind {
     Impl,
     TraitImpl(Option<ast::Impl>),
     Trait,
-    ExternBlock,
+    ExternBlock { is_unsafe: bool },
 }
 
 #[derive(Debug)]
@@ -1108,7 +1108,14 @@ fn classify_name_ref(
                     },
                     None => return None,
                 } },
-                ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+                ast::ExternItemList(it) => {
+                    let exn_blk = it.syntax().parent().and_then(ast::ExternBlock::cast);
+                    PathKind::Item {
+                        kind: ItemListKind::ExternBlock {
+                            is_unsafe: exn_blk.and_then(|it| it.unsafe_token()).is_some(),
+                        }
+                    }
+                },
                 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
                 _ => return None,
             }
@@ -1310,6 +1317,7 @@ fn classify_name_ref(
         match token.kind() {
             SyntaxKind::UNSAFE_KW => qualifier_ctx.unsafe_tok = Some(token),
             SyntaxKind::ASYNC_KW => qualifier_ctx.async_tok = Some(token),
+            SyntaxKind::SAFE_KW => qualifier_ctx.safe_tok = Some(token),
             _ => {}
         }
     }
@@ -1853,8 +1853,8 @@ fn f() { A { bar: b$0 }; }
         expect![[r#"
             fn bar() [type+name]
             fn baz() [type]
-            ex baz() [type]
             ex bar() [type]
+            ex baz() [type]
             st A []
             fn f() []
         "#]],
@@ -124,6 +124,7 @@ fn after_unsafe_token() {
         r#"unsafe $0"#,
         expect![[r#"
             kw async
+            kw extern
             kw fn
             kw impl
             kw trait
@@ -495,3 +496,57 @@ type O = $0;
         ",
     )
 }
+
+#[test]
+fn inside_extern_blocks() {
+    // Should suggest `fn`, `static`, `unsafe`
+    check(
+        r#"extern { $0 }"#,
+        expect![[r#"
+            ma makro!(…) macro_rules! makro
+            md module
+            kw crate::
+            kw fn
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw self::
+            kw static
+            kw unsafe
+        "#]],
+    );
+
+    // Should suggest `fn`, `static`, `safe`, `unsafe`
+    check(
+        r#"unsafe extern { $0 }"#,
+        expect![[r#"
+            ma makro!(…) macro_rules! makro
+            md module
+            kw crate::
+            kw fn
+            kw pub
+            kw pub(crate)
+            kw pub(super)
+            kw safe
+            kw self::
+            kw static
+            kw unsafe
+        "#]],
+    );
+
+    check(
+        r#"unsafe extern { pub safe $0 }"#,
+        expect![[r#"
+            kw fn
+            kw static
+        "#]],
+    );
+
+    check(
+        r#"unsafe extern { pub unsafe $0 }"#,
+        expect![[r#"
+            kw fn
+            kw static
+        "#]],
+    )
+}
@@ -923,3 +923,21 @@ fn foo() {
         "#]],
     );
 }
+
+#[test]
+fn private_item_in_module_in_function_body() {
+    check_empty(
+        r#"
+fn main() {
+    mod foo {
+        struct Private;
+        pub struct Public;
+    }
+    foo::$0
+}
+"#,
+        expect![[r#"
+            st Public Public
+        "#]],
+    );
+}
@@ -595,4 +595,39 @@ unsafe fn foo(p: *mut i32) {
 "#,
         )
     }
+
+    #[test]
+    fn no_unsafe_diagnostic_with_safe_kw() {
+        check_diagnostics(
+            r#"
+unsafe extern {
+    pub safe fn f();
+
+    pub unsafe fn g();
+
+    pub fn h();
+
+    pub safe static S1: i32;
+
+    pub unsafe static S2: i32;
+
+    pub static S3: i32;
+}
+
+fn main() {
+    f();
+    g();
+  //^^^💡 error: this operation is unsafe and requires an unsafe function or block
+    h();
+  //^^^💡 error: this operation is unsafe and requires an unsafe function or block
+
+    let _ = S1;
+    let _ = S2;
+          //^^💡 error: this operation is unsafe and requires an unsafe function or block
+    let _ = S3;
+          //^^💡 error: this operation is unsafe and requires an unsafe function or block
+}
+"#,
+        );
+    }
 }
@@ -286,6 +286,20 @@ fn main() {
                     ),
                 },
             },
+            Annotation { range: 53..57, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 53 }, data: Some([]) } },
             Annotation {
                 range: 53..57,
                 kind: Runnable(
@@ -305,20 +319,6 @@ fn main() {
                     },
                 ),
             },
-            Annotation { range: 53..57, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 53 }, data: Some([]) } },
         ]
     "#]],
 );
@@ -336,6 +336,20 @@ fn main() {
         "#,
         expect![[r#"
             [
+                Annotation { range: 7..11, kind: HasImpls { pos: FilePositionWrapper { file_id: FileId(0), offset: 7 }, data: Some([]) } },
                 Annotation {
                     range: 7..11,
                     kind: HasReferences {
@@ -358,13 +372,13 @@ fn main() {
                     },
                 },
                 Annotation {
-                    range: 7..11,
-                    kind: HasImpls {
+                    range: 17..21,
+                    kind: HasReferences {
                         pos: FilePositionWrapper {
                             file_id: FileId(
                                 0,
                             ),
-                            offset: 7,
+                            offset: 17,
                         },
                         data: Some(
                             [],
@@ -390,20 +404,6 @@ fn main() {
                     },
                 ),
             },
-                Annotation { range: 17..21, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 17 }, data: Some([]) } },
         ]
     "#]],
 );
@@ -425,6 +425,30 @@ fn main() {
         "#,
         expect![[r#"
             [
+                Annotation { range: 7..11, kind: HasImpls { pos: FilePositionWrapper { file_id: FileId(0), offset: 7 }, data: Some([NavigationTarget { file_id: FileId(0), full_range: 36..64, focus_range: 57..61, name: "impl", kind: Impl }]) } },
                 Annotation {
                     range: 7..11,
                     kind: HasReferences {
@@ -452,30 +476,6 @@ fn main() {
                         ),
                     },
                 },
-                Annotation { range: 7..11, kind: HasImpls { pos: FilePositionWrapper { file_id: FileId(0), offset: 7 }, data: Some([NavigationTarget { file_id: FileId(0), full_range: 36..64, focus_range: 57..61, name: "impl", kind: Impl }]) } },
                 Annotation {
                     range: 20..31,
                     kind: HasImpls {
@@ -521,20 +521,6 @@ fn main() {
                        ),
                     },
                 },
-                Annotation { range: 69..73, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 69 }, data: Some([]) } },
                 Annotation {
                     range: 69..73,
                     kind: Runnable(
@@ -554,6 +540,20 @@ fn main() {
                     },
                 ),
             },
+                Annotation { range: 69..73, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 69 }, data: Some([]) } },
         ]
     "#]],
 );
@@ -567,6 +567,20 @@ fn main() {}
         "#,
         expect![[r#"
             [
+                Annotation { range: 3..7, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 3 }, data: Some([]) } },
                 Annotation {
                     range: 3..7,
                     kind: Runnable(
@@ -586,20 +600,6 @@ fn main() {}
                     },
                 ),
             },
-                Annotation { range: 3..7, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 3 }, data: Some([]) } },
         ]
     "#]],
 );
@@ -621,6 +621,30 @@ fn main() {
         "#,
         expect![[r#"
             [
+                Annotation { range: 7..11, kind: HasImpls { pos: FilePositionWrapper { file_id: FileId(0), offset: 7 }, data: Some([NavigationTarget { file_id: FileId(0), full_range: 14..56, focus_range: 19..23, name: "impl", kind: Impl }]) } },
                 Annotation {
                     range: 7..11,
                     kind: HasReferences {
@@ -648,30 +672,6 @@ fn main() {
                         ),
                     },
                 },
-                Annotation { range: 7..11, kind: HasImpls { pos: FilePositionWrapper { file_id: FileId(0), offset: 7 }, data: Some([NavigationTarget { file_id: FileId(0), full_range: 14..56, focus_range: 19..23, name: "impl", kind: Impl }]) } },
                 Annotation {
                     range: 33..44,
                     kind: HasReferences {
@@ -693,20 +693,6 @@ fn main() {
                         ),
                     },
                 },
-                Annotation { range: 61..65, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 61 }, data: Some([]) } },
                 Annotation {
                     range: 61..65,
                     kind: Runnable(
@@ -726,6 +712,20 @@ fn main() {
                     },
                 ),
             },
+                Annotation { range: 61..65, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 61 }, data: Some([]) } },
         ]
     "#]],
 );
@@ -744,20 +744,6 @@ mod tests {
         "#,
         expect![[r#"
             [
-                Annotation { range: 3..7, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 3 }, data: Some([]) } },
                 Annotation {
                     range: 3..7,
                     kind: Runnable(
@@ -777,6 +763,20 @@ mod tests {
                     },
                 ),
             },
+                Annotation { range: 3..7, kind: HasReferences { pos: FilePositionWrapper { file_id: FileId(0), offset: 3 }, data: Some([]) } },
                 Annotation {
                     range: 18..23,
                     kind: Runnable(
@@ -876,7 +876,7 @@ struct Foo;
             [
                 Annotation {
                     range: 0..71,
-                    kind: HasReferences {
+                    kind: HasImpls {
                         pos: FilePositionWrapper {
                             file_id: FileId(
                                 0,
@@ -890,7 +890,7 @@ struct Foo;
                 },
                 Annotation {
                     range: 0..71,
-                    kind: HasImpls {
+                    kind: HasReferences {
                         pos: FilePositionWrapper {
                             file_id: FileId(
                                 0,
@@ -497,6 +497,44 @@ fn func_in_include() {
 //^^^^^^^^^^^^^^^
 }
 
+fn foo() {
+    func_in_include$0();
+}
+"#,
+    );
+}
+
+#[test]
+fn goto_def_in_included_file_inside_mod() {
+    check(
+        r#"
+//- minicore:include
+//- /main.rs
+mod a {
+    include!("b.rs");
+}
+//- /b.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+fn foo() {
+    func_in_include$0();
+}
+"#,
+    );
+
+    check(
+        r#"
+//- minicore:include
+//- /main.rs
+mod a {
+    include!("a.rs");
+}
+//- /a.rs
+fn func_in_include() {
+ //^^^^^^^^^^^^^^^
+}
+
 fn foo() {
     func_in_include$0();
 }
@@ -2750,4 +2750,25 @@ impl Foo {
         "#]],
     );
 }
+
+#[test]
+fn goto_ref_on_included_file() {
+    check(
+        r#"
+//- minicore:include
+//- /lib.rs
+include!("foo.rs");
+fn howdy() {
+    let _ = FOO;
+}
+//- /foo.rs
+const FOO$0: i32 = 0;
+"#,
+        expect![[r#"
+            FOO Const FileId(1) 0..19 6..9
+
+            FileId(0) 45..48
+        "#]],
+    );
+}
 }
@@ -1350,18 +1350,18 @@ mod tests {
                 file_id: FileId(
                     0,
                 ),
-                full_range: 121..185,
-                focus_range: 136..145,
-                name: "foo2_test",
+                full_range: 52..115,
+                focus_range: 67..75,
+                name: "foo_test",
                 kind: Function,
             },
             NavigationTarget {
                 file_id: FileId(
                     0,
                 ),
-                full_range: 52..115,
-                focus_range: 67..75,
-                name: "foo_test",
+                full_range: 121..185,
+                focus_range: 136..145,
+                name: "foo2_test",
                 kind: Function,
             },
         ]
@@ -46,14 +46,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 .unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
 </style>
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="8384512769119783714" style="color: hsl(59,93%,58%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="17360984456076382725" style="color: hsl(95,79%,86%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8384512769119783714" style="color: hsl(59,93%,58%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="17186414787327620935" style="color: hsl(196,64%,89%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="8384512769119783714" style="color: hsl(59,93%,58%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="10753541418856619067" style="color: hsl(51,52%,47%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="9865812862466303869" style="color: hsl(329,86%,55%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="4786021388930833562" style="color: hsl(137,61%,87%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="18017815841345165192" style="color: hsl(39,76%,89%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="4786021388930833562" style="color: hsl(137,61%,87%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4002942168268782293" style="color: hsl(114,87%,67%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 <span class="brace">}</span>
 
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="8384512769119783714" style="color: hsl(59,93%,58%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
 <span class="brace">}</span></code></pre>
@@ -3,6 +3,7 @@
 use intern::Symbol;
 use rustc_hash::FxHashMap;
 use span::{Edition, Span};
+use stdx::itertools::Itertools;
 use syntax::{
     ast::{self, HasName},
     AstNode,
@@ -27,9 +28,10 @@ fn benchmark_parse_macro_rules() {
     let hash: usize = {
         let _pt = bench("mbe parse macro rules");
         rules
-            .values()
-            .map(|it| {
-                DeclarativeMacro::parse_macro_rules(it, |_| span::Edition::CURRENT).rules.len()
+            .into_iter()
+            .sorted_by_key(|(id, _)| id.clone())
+            .map(|(_, it)| {
+                DeclarativeMacro::parse_macro_rules(&it, |_| span::Edition::CURRENT).rules.len()
             })
             .sum()
     };
@@ -55,12 +57,13 @@ fn benchmark_expand_macro_rules() {
             })
             .sum()
     };
-    assert_eq!(hash, 69413);
+    assert_eq!(hash, 65720);
 }
 
 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
     macro_rules_fixtures_tt()
         .into_iter()
+        .sorted_by_key(|(id, _)| id.clone())
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, |_| span::Edition::CURRENT)))
         .collect()
 }
@@ -93,7 +96,7 @@ fn invocation_fixtures(
     let mut seed = 123456789;
     let mut res = Vec::new();
 
-    for (name, it) in rules {
+    for (name, it) in rules.iter().sorted_by_key(|&(id, _)| id) {
         for rule in it.rules.iter() {
             // Generate twice
             for _ in 0..2 {
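The `sorted_by_key` calls above are presumably there to make iteration over the `FxHashMap` fixtures deterministic, since hash-map iteration order is unspecified. A minimal standalone sketch of that idea in plain std Rust (the fixture names and counts are made up for illustration; this is not rust-analyzer's code):

use std::collections::HashMap;

fn main() {
    // Hypothetical fixture map; iteration order of a HashMap is unspecified.
    let rules: HashMap<String, usize> =
        [("vec".to_string(), 3), ("panic".to_string(), 2), ("assert".to_string(), 5)].into();

    // Sorting the entries by key before consuming them makes any order-dependent
    // processing (e.g. seeding a RNG per rule, concatenating output) reproducible.
    let mut entries: Vec<_> = rules.into_iter().collect();
    entries.sort_by(|(a, _), (b, _)| a.cmp(b));

    for (name, rule_count) in entries {
        println!("{name}: {rule_count} rules");
    }
}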
@@ -135,6 +135,11 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
         has_mods = true;
     }
 
+    if p.at_contextual_kw(T![safe]) {
+        p.eat_contextual_kw(T![safe]);
+        has_mods = true;
+    }
+
     if p.at(T![extern]) {
         has_extern = true;
         has_mods = true;
@@ -189,6 +194,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
         T![fn] => fn_(p, m),
 
         T![const] if p.nth(1) != T!['{'] => consts::konst(p, m),
+        T![static] if matches!(p.nth(1), IDENT | T![_] | T![mut]) => consts::static_(p, m),
 
         T![trait] => traits::trait_(p, m),
         T![impl] => traits::impl_(p, m),
File diff suppressed because one or more lines are too long
@@ -0,0 +1,208 @@
SOURCE_FILE
  EXTERN_BLOCK
    UNSAFE_KW "unsafe"
    WHITESPACE " "
    ABI
      EXTERN_KW "extern"
    WHITESPACE " "
    EXTERN_ITEM_LIST
      L_CURLY "{"
      WHITESPACE "\n "
      FN
        COMMENT "// sqrt (from libm) may be called with any `f64`"
        WHITESPACE "\n "
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        SAFE_KW "safe"
        WHITESPACE " "
        FN_KW "fn"
        WHITESPACE " "
        NAME
          IDENT "sqrt"
        PARAM_LIST
          L_PAREN "("
          PARAM
            IDENT_PAT
              NAME
                IDENT "x"
            COLON ":"
            WHITESPACE " "
            PATH_TYPE
              PATH
                PATH_SEGMENT
                  NAME_REF
                    IDENT "f64"
          R_PAREN ")"
        WHITESPACE " "
        RET_TYPE
          THIN_ARROW "->"
          WHITESPACE " "
          PATH_TYPE
            PATH
              PATH_SEGMENT
                NAME_REF
                  IDENT "f64"
        SEMICOLON ";"
      WHITESPACE "\n\n "
      FN
        COMMENT "// strlen (from libc) requires a valid pointer,"
        WHITESPACE "\n "
        COMMENT "// so we mark it as being an unsafe fn"
        WHITESPACE "\n "
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        UNSAFE_KW "unsafe"
        WHITESPACE " "
        FN_KW "fn"
        WHITESPACE " "
        NAME
          IDENT "strlen"
        PARAM_LIST
          L_PAREN "("
          PARAM
            IDENT_PAT
              NAME
                IDENT "p"
            COLON ":"
            WHITESPACE " "
            PTR_TYPE
              STAR "*"
              CONST_KW "const"
              WHITESPACE " "
              PATH_TYPE
                PATH
                  PATH_SEGMENT
                    NAME_REF
                      IDENT "c_char"
          R_PAREN ")"
        WHITESPACE " "
        RET_TYPE
          THIN_ARROW "->"
          WHITESPACE " "
          PATH_TYPE
            PATH
              PATH_SEGMENT
                NAME_REF
                  IDENT "usize"
        SEMICOLON ";"
      WHITESPACE "\n\n "
      FN
        COMMENT "// this function doesn't say safe or unsafe, so it defaults to unsafe"
        WHITESPACE "\n "
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        FN_KW "fn"
        WHITESPACE " "
        NAME
          IDENT "free"
        PARAM_LIST
          L_PAREN "("
          PARAM
            IDENT_PAT
              NAME
                IDENT "p"
            COLON ":"
            WHITESPACE " "
            PTR_TYPE
              STAR "*"
              MUT_KW "mut"
              WHITESPACE " "
              PATH_TYPE
                PATH
                  PATH
                    PATH
                      PATH_SEGMENT
                        NAME_REF
                          IDENT "core"
                    COLON2 "::"
                    PATH_SEGMENT
                      NAME_REF
                        IDENT "ffi"
                  COLON2 "::"
                  PATH_SEGMENT
                    NAME_REF
                      IDENT "c_void"
          R_PAREN ")"
        SEMICOLON ";"
      WHITESPACE "\n\n "
      STATIC
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        SAFE_KW "safe"
        WHITESPACE " "
        STATIC_KW "static"
        WHITESPACE " "
        MUT_KW "mut"
        WHITESPACE " "
        NAME
          IDENT "COUNTER"
        COLON ":"
        WHITESPACE " "
        PATH_TYPE
          PATH
            PATH_SEGMENT
              NAME_REF
                IDENT "i32"
        SEMICOLON ";"
      WHITESPACE "\n\n "
      STATIC
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        UNSAFE_KW "unsafe"
        WHITESPACE " "
        STATIC_KW "static"
        WHITESPACE " "
        NAME
          IDENT "IMPORTANT_BYTES"
        COLON ":"
        WHITESPACE " "
        ARRAY_TYPE
          L_BRACK "["
          PATH_TYPE
            PATH
              PATH_SEGMENT
                NAME_REF
                  IDENT "u8"
          SEMICOLON ";"
          WHITESPACE " "
          CONST_ARG
            LITERAL
              INT_NUMBER "256"
          R_BRACK "]"
        SEMICOLON ";"
      WHITESPACE "\n\n "
      STATIC
        VISIBILITY
          PUB_KW "pub"
        WHITESPACE " "
        SAFE_KW "safe"
        WHITESPACE " "
        STATIC_KW "static"
        WHITESPACE " "
        NAME
          IDENT "LINES"
        COLON ":"
        WHITESPACE " "
        PATH_TYPE
          PATH
            PATH_SEGMENT
              NAME_REF
                IDENT "SyncUnsafeCell"
              GENERIC_ARG_LIST
                L_ANGLE "<"
                TYPE_ARG
                  PATH_TYPE
                    PATH
                      PATH_SEGMENT
                        NAME_REF
                          IDENT "i32"
                R_ANGLE ">"
        SEMICOLON ";"
      WHITESPACE "\n"
      R_CURLY "}"
  WHITESPACE "\n"
@@ -0,0 +1,17 @@
unsafe extern {
    // sqrt (from libm) may be called with any `f64`
    pub safe fn sqrt(x: f64) -> f64;

    // strlen (from libc) requires a valid pointer,
    // so we mark it as being an unsafe fn
    pub unsafe fn strlen(p: *const c_char) -> usize;

    // this function doesn't say safe or unsafe, so it defaults to unsafe
    pub fn free(p: *mut core::ffi::c_void);

    pub safe static mut COUNTER: i32;

    pub unsafe static IMPORTANT_BYTES: [u8; 256];

    pub safe static LINES: SyncUnsafeCell<i32>;
}
@@ -222,8 +222,6 @@ fn rust_project_is_proc_macro_has_proc_macro_dep() {
 }
 
 #[test]
-// FIXME Remove the ignore
-#[ignore = "requires nightly until the sysroot ships a cargo workspace for library on stable"]
 fn smoke_test_real_sysroot_cargo() {
     let file_map = &mut FxHashMap::<AbsPathBuf, FileId>::default();
     let meta: Metadata = get_test_json_file("hello-world-metadata.json");
@@ -235,7 +233,6 @@ fn smoke_test_real_sysroot_cargo() {
         &Default::default(),
     );
     assert!(matches!(sysroot.mode(), SysrootMode::Workspace(_)));
-
     let project_workspace = ProjectWorkspace {
         kind: ProjectWorkspaceKind::Cargo {
             cargo: cargo_workspace,
@@ -17,7 +17,7 @@ indexmap = "2.1.0"
 lock_api = "0.4"
 tracing = "0.1"
 parking_lot = "0.12.1"
-rustc-hash = "1.0"
+rustc-hash = "2.0.0"
 smallvec = "1.0.0"
 oorandom = "11"
 triomphe = "0.1.11"
@@ -6,6 +6,7 @@ use std::{
     time::{SystemTime, UNIX_EPOCH},
 };
 
+use cfg::{CfgAtom, CfgDiff};
 use hir::{
     db::{DefDatabase, ExpandDatabase, HirDatabase},
     Adt, AssocItem, Crate, DefWithBody, HasSource, HirDisplay, HirFileIdExt, ImportPathConfig,
@@ -31,7 +32,7 @@ use itertools::Itertools;
 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
 use oorandom::Rand32;
 use profile::{Bytes, StopWatch};
-use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
+use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
 use rayon::prelude::*;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{AstNode, SyntaxNode};
@@ -65,7 +66,11 @@ impl flags::AnalysisStats {
                 false => Some(RustLibSource::Discover),
             },
             all_targets: true,
-            set_test: true,
+            set_test: !self.no_test,
+            cfg_overrides: CfgOverrides {
+                global: CfgDiff::new(vec![CfgAtom::Flag(hir::sym::miri.clone())], vec![]).unwrap(),
+                selective: Default::default(),
+            },
             ..Default::default()
         };
         let no_progress = &|_| ();
@@ -71,6 +71,8 @@ xflags::xflags! {
             optional --with-deps
             /// Don't load sysroot crates (`std`, `core` & friends).
             optional --no-sysroot
+            /// Don't set #[cfg(test)].
+            optional --no-test
 
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
@@ -233,6 +235,7 @@ pub struct AnalysisStats {
     pub only: Option<String>,
     pub with_deps: bool,
     pub no_sysroot: bool,
+    pub no_test: bool,
     pub disable_build_scripts: bool,
     pub disable_proc_macros: bool,
     pub proc_macro_srv: Option<PathBuf>,
@@ -35,10 +35,18 @@ pub(crate) fn offset(
                 .ok_or_else(|| format_err!("Invalid wide col offset"))?
         }
     };
-    let text_size = line_index.index.offset(line_col).ok_or_else(|| {
+    let line_range = line_index.index.line(line_col.line).ok_or_else(|| {
         format_err!("Invalid offset {line_col:?} (line index length: {:?})", line_index.index.len())
     })?;
-    Ok(text_size)
+    let col = TextSize::from(line_col.col);
+    let clamped_len = col.min(line_range.len());
+    if clamped_len < col {
+        tracing::error!(
+            "Position {line_col:?} column exceeds line length {}, clamping it",
+            u32::from(line_range.len()),
+        );
+    }
+    Ok(line_range.start() + clamped_len)
 }
 
 pub(crate) fn text_range(
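A simplified sketch of the column-clamping behaviour introduced above, using plain integers instead of the LineIndex/TextSize types (clamp_col is a hypothetical helper written only to illustrate the idea; it is not rust-analyzer's API):

// Turn a (line start, line length, requested column) triple into an offset,
// clamping columns that run past the end of the line instead of failing.
fn clamp_col(line_start: u32, line_len: u32, col: u32) -> u32 {
    let clamped = col.min(line_len);
    if clamped < col {
        eprintln!("column {col} exceeds line length {line_len}, clamping it");
    }
    line_start + clamped
}

fn main() {
    assert_eq!(clamp_col(10, 4, 2), 12); // column inside the line
    assert_eq!(clamp_col(10, 4, 9), 14); // column past the end clamps to the line end
}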
@@ -190,7 +190,7 @@ UseTreeList =
 
 Fn =
  Attr* Visibility?
- 'default'? 'const'? 'async'? 'gen'? 'unsafe'? Abi?
+ 'default'? 'const'? 'async'? 'gen'? 'unsafe'? 'safe'? Abi?
  'fn' Name GenericParamList? ParamList RetType? WhereClause?
  (body:BlockExpr | ';')
 
@@ -284,6 +284,7 @@ Const =
 
 Static =
  Attr* Visibility?
+ 'unsafe'? 'safe'?
  'static' 'mut'? Name ':' Type
  ('=' body:Expr)? ';'
 
@@ -668,6 +668,8 @@ impl Fn {
     #[inline]
     pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
     #[inline]
+    pub fn safe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![safe]) }
+    #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
 
@@ -1761,7 +1763,11 @@ impl Static {
     #[inline]
     pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
     #[inline]
+    pub fn safe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![safe]) }
+    #[inline]
     pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+    #[inline]
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -27,7 +27,7 @@ pub struct AstPtr<N: AstNode> {
     _ty: PhantomData<fn() -> N>,
 }
 
-impl<N: AstNode + std::fmt::Debug> std::fmt::Debug for AstPtr<N> {
+impl<N: AstNode> std::fmt::Debug for AstPtr<N> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_tuple("AstPtr").field(&self.raw).finish()
     }
@@ -95,8 +95,8 @@ avoid checking unnecessary things.
 Default:
 ----
 {
-  "debug_assertions": null,
-  "miri": null
+  "miri": null,
+  "debug_assertions": null
 }
 ----
 List of cfg options to enable with the given values.
|
||||||
Default:
|
Default:
|
||||||
----
|
----
|
||||||
{
|
{
|
||||||
"Arc::new": {
|
"Ok": {
|
||||||
"postfix": "arc",
|
"postfix": "ok",
|
||||||
"body": "Arc::new(${receiver})",
|
"body": "Ok(${receiver})",
|
||||||
"requires": "std::sync::Arc",
|
"description": "Wrap the expression in a `Result::Ok`",
|
||||||
"description": "Put the expression into an `Arc`",
|
|
||||||
"scope": "expr"
|
|
||||||
},
|
|
||||||
"Rc::new": {
|
|
||||||
"postfix": "rc",
|
|
||||||
"body": "Rc::new(${receiver})",
|
|
||||||
"requires": "std::rc::Rc",
|
|
||||||
"description": "Put the expression into an `Rc`",
|
|
||||||
"scope": "expr"
|
"scope": "expr"
|
||||||
},
|
},
|
||||||
"Box::pin": {
|
"Box::pin": {
|
||||||
|
@ -342,10 +334,11 @@ Default:
|
||||||
"description": "Put the expression into a pinned `Box`",
|
"description": "Put the expression into a pinned `Box`",
|
||||||
"scope": "expr"
|
"scope": "expr"
|
||||||
},
|
},
|
||||||
"Err": {
|
"Arc::new": {
|
||||||
"postfix": "err",
|
"postfix": "arc",
|
||||||
"body": "Err(${receiver})",
|
"body": "Arc::new(${receiver})",
|
||||||
"description": "Wrap the expression in a `Result::Err`",
|
"requires": "std::sync::Arc",
|
||||||
|
"description": "Put the expression into an `Arc`",
|
||||||
"scope": "expr"
|
"scope": "expr"
|
||||||
},
|
},
|
||||||
"Some": {
|
"Some": {
|
||||||
|
@ -354,10 +347,17 @@ Default:
|
||||||
"description": "Wrap the expression in an `Option::Some`",
|
"description": "Wrap the expression in an `Option::Some`",
|
||||||
"scope": "expr"
|
"scope": "expr"
|
||||||
},
|
},
|
||||||
"Ok": {
|
"Err": {
|
||||||
"postfix": "ok",
|
"postfix": "err",
|
||||||
"body": "Ok(${receiver})",
|
"body": "Err(${receiver})",
|
||||||
"description": "Wrap the expression in a `Result::Ok`",
|
"description": "Wrap the expression in a `Result::Err`",
|
||||||
|
"scope": "expr"
|
||||||
|
},
|
||||||
|
"Rc::new": {
|
||||||
|
"postfix": "rc",
|
||||||
|
"body": "Rc::new(${receiver})",
|
||||||
|
"requires": "std::rc::Rc",
|
||||||
|
"description": "Put the expression into an `Rc`",
|
||||||
"scope": "expr"
|
"scope": "expr"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
editors/code/package-lock.json (generated, 1539): diff suppressed because it is too large.
@@ -58,14 +58,14 @@
     "@typescript-eslint/eslint-plugin": "^6.0.0",
     "@typescript-eslint/parser": "^6.0.0",
     "@vscode/test-electron": "^2.3.8",
-    "@vscode/vsce": "^2.19.0",
+    "@vscode/vsce": "^3.0.0",
     "esbuild": "^0.18.12",
     "eslint": "^8.44.0",
     "eslint-config-prettier": "^8.8.0",
     "ovsx": "^0.8.2",
     "prettier": "^3.0.0",
     "tslib": "^2.6.0",
-    "typescript": "^5.1.6"
+    "typescript": "^5.6.0"
   },
   "activationEvents": [
     "workspaceContains:Cargo.toml",
@@ -349,6 +349,11 @@
           "markdownDescription": "Whether to show the test explorer.",
           "default": false,
           "type": "boolean"
+        },
+        "rust-analyzer.initializeStopped": {
+          "markdownDescription": "Do not start rust-analyzer server when the extension is activated.",
+          "default": false,
+          "type": "boolean"
         }
       }
     },
|
@ -728,8 +733,8 @@
|
||||||
"rust-analyzer.cargo.cfgs": {
|
"rust-analyzer.cargo.cfgs": {
|
||||||
"markdownDescription": "List of cfg options to enable with the given values.",
|
"markdownDescription": "List of cfg options to enable with the given values.",
|
||||||
"default": {
|
"default": {
|
||||||
"debug_assertions": null,
|
"miri": null,
|
||||||
"miri": null
|
"debug_assertions": null
|
||||||
},
|
},
|
||||||
"type": "object"
|
"type": "object"
|
||||||
}
|
}
|
||||||
|
@@ -1152,18 +1157,10 @@
         "rust-analyzer.completion.snippets.custom": {
           "markdownDescription": "Custom completion snippets.",
           "default": {
-            "Arc::new": {
-              "postfix": "arc",
-              "body": "Arc::new(${receiver})",
-              "requires": "std::sync::Arc",
-              "description": "Put the expression into an `Arc`",
-              "scope": "expr"
-            },
-            "Rc::new": {
-              "postfix": "rc",
-              "body": "Rc::new(${receiver})",
-              "requires": "std::rc::Rc",
-              "description": "Put the expression into an `Rc`",
+            "Ok": {
+              "postfix": "ok",
+              "body": "Ok(${receiver})",
+              "description": "Wrap the expression in a `Result::Ok`",
               "scope": "expr"
             },
             "Box::pin": {
@@ -1173,10 +1170,11 @@
               "description": "Put the expression into a pinned `Box`",
               "scope": "expr"
             },
-            "Err": {
-              "postfix": "err",
-              "body": "Err(${receiver})",
-              "description": "Wrap the expression in a `Result::Err`",
+            "Arc::new": {
+              "postfix": "arc",
+              "body": "Arc::new(${receiver})",
+              "requires": "std::sync::Arc",
+              "description": "Put the expression into an `Arc`",
               "scope": "expr"
             },
             "Some": {
@@ -1185,10 +1183,17 @@
               "description": "Wrap the expression in an `Option::Some`",
               "scope": "expr"
             },
-            "Ok": {
-              "postfix": "ok",
-              "body": "Ok(${receiver})",
-              "description": "Wrap the expression in a `Result::Ok`",
+            "Err": {
+              "postfix": "err",
+              "body": "Err(${receiver})",
+              "description": "Wrap the expression in a `Result::Err`",
+              "scope": "expr"
+            },
+            "Rc::new": {
+              "postfix": "rc",
+              "body": "Rc::new(${receiver})",
+              "requires": "std::rc::Rc",
+              "description": "Put the expression into an `Rc`",
               "scope": "expr"
             }
           },
@@ -330,6 +330,10 @@ export class Config {
     get statusBarClickAction() {
         return this.get<string>("statusBar.clickAction");
     }
+
+    get initializeStopped() {
+        return this.get<boolean>("initializeStopped");
+    }
 }
 
 export function prepareVSCodeConfig<T>(resp: T): T {
@@ -6,7 +6,6 @@ import type * as ra from "./lsp_ext";
 import { Cargo } from "./toolchain";
 import type { Ctx } from "./ctx";
 import { createTaskFromRunnable, prepareEnv } from "./run";
-import { execSync } from "node:child_process";
 import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
 import type { Config } from "./config";
 
@@ -152,9 +151,24 @@ async function getDebugConfiguration(
     const env = prepareEnv(inheritEnv, runnable.label, runnableArgs, config.runnablesExtraEnv);
     const executable = await getDebugExecutable(runnableArgs, env);
     let sourceFileMap = debugOptions.sourceFileMap;
 
     if (sourceFileMap === "auto") {
         sourceFileMap = {};
-        await discoverSourceFileMap(sourceFileMap, env, wsFolder);
+        const computedSourceFileMap = await discoverSourceFileMap(env, wsFolder);
+
+        if (computedSourceFileMap) {
+            // lldb-dap requires passing the source map as an array of two element arrays.
+            // the two element array contains a source and destination pathname.
+            // TODO: remove lldb-dap-specific post-processing once
+            // https://github.com/llvm/llvm-project/pull/106919/ is released in the extension.
+            if (provider.type === "lldb-dap") {
+                provider.additional["sourceMap"] = [
+                    [computedSourceFileMap?.source, computedSourceFileMap?.destination],
+                ];
+            } else {
+                sourceFileMap[computedSourceFileMap.source] = computedSourceFileMap.destination;
+            }
+        }
     }
 
     const debugConfig = getDebugConfig(
@@ -189,11 +203,15 @@ async function getDebugConfiguration(
     return debugConfig;
 }
 
+type SourceFileMap = {
+    source: string;
+    destination: string;
+};
+
 async function discoverSourceFileMap(
-    sourceFileMap: Record<string, string>,
     env: Record<string, string>,
     cwd: string,
-) {
+): Promise<SourceFileMap | undefined> {
     const sysroot = env["RUSTC_TOOLCHAIN"];
     if (sysroot) {
         // let's try to use the default toolchain
@@ -203,9 +221,11 @@ async function discoverSourceFileMap(
         const commitHash = rx.exec(data)?.[1];
         if (commitHash) {
             const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust");
-            sourceFileMap[`/rustc/${commitHash}/`] = rustlib;
+            return { source: rustlib, destination: rustlib };
         }
     }
+
+    return;
 }
 
 type PropertyFetcher<Config, Input, Key extends keyof Config> = (
@@ -218,7 +238,7 @@ type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfi
     runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>;
     sourceFileMapProperty?: keyof DebugConfig;
     type: Type;
-    additional?: Record<string, unknown>;
+    additional: Record<string, unknown>;
 };
 
 type KnownEnginesType = (typeof knownEngines)[keyof typeof knownEngines];
@@ -236,16 +256,7 @@ const knownEngines: {
             "args",
             runnableArgs.executableArgs,
         ],
-        additional: {
-            sourceMap: [
-                [
-                    `/rustc/${/commit-hash:\s(.*)$/m.exec(
-                        execSync("rustc -V -v", {}).toString(),
-                    )?.[1]}/library`,
-                    "${config:rust-analyzer.cargo.sysroot}/lib/rustlib/src/rust/library",
-                ],
-            ],
-        },
+        additional: {},
     },
     "vadimcn.vscode-lldb": {
         type: "lldb",
@@ -107,7 +107,14 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
         initializeDebugSessionTrackingAndRebuild(ctx);
     }
 
-    await ctx.start();
+    if (ctx.config.initializeStopped) {
+        ctx.setServerStatus({
+            health: "stopped",
+        });
+    } else {
+        await ctx.start();
+    }
 
     return ctx;
 }
 
@@ -1,6 +1,6 @@
 [package]
 name = "line-index"
-version = "0.1.1"
+version = "0.1.2"
 description = "Maps flat `TextSize` offsets to/from `(line, column)` representation."
 license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index"
@@ -177,6 +177,14 @@ impl LineIndex {
         Some(LineCol { line: line_col.line, col })
     }
 
+    /// Returns the given line's range.
+    pub fn line(&self, line: u32) -> Option<TextRange> {
+        let start = self.start_offset(line as usize)?;
+        let next_newline = self.newlines.get(line as usize).copied().unwrap_or(self.len);
+        let line_length = next_newline - start;
+        Some(TextRange::new(start, start + line_length))
+    }
+
     /// Given a range [start, end), returns a sorted iterator of non-empty ranges [start, x1), [x1,
     /// x2), ..., [xn, end) where all the xi, which are positions of newlines, are inside the range
     /// [start, end).
@@ -195,3 +195,26 @@ fn test_every_chars() {
         }
     }
 }
+
+#[test]
+fn test_line() {
+    use text_size::TextRange;
+
+    macro_rules! validate {
+        ($text:expr, $line:expr, $expected_start:literal .. $expected_end:literal) => {
+            let line_index = LineIndex::new($text);
+            assert_eq!(
+                line_index.line($line),
+                Some(TextRange::new(
+                    TextSize::from($expected_start),
+                    TextSize::from($expected_end)
+                ))
+            );
+        };
+    }
+
+    validate!("", 0, 0..0);
+    validate!("\n", 1, 1..1);
+    validate!("\nabc", 1, 1..4);
+    validate!("\nabc\ndef", 1, 1..5);
+}
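A usage sketch of the new `LineIndex::line` accessor, mirroring the `test_line` cases above. It assumes the line-index 0.1.2 crate with this change applied, plus the text-size crate for `TextRange`/`TextSize`; the exact out-of-range behaviour shown for a missing line is an assumption based on the `Option` return type:

use line_index::LineIndex;
use text_size::{TextRange, TextSize};

fn main() {
    let index = LineIndex::new("\nabc\ndef");
    // Line 1 is "abc\n": it starts at offset 1 and ends at offset 5.
    assert_eq!(
        index.line(1),
        Some(TextRange::new(TextSize::from(1), TextSize::from(5)))
    );
    // Asking for a line past the end of the text yields None.
    assert_eq!(index.line(10), None);
}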
@@ -1 +1 @@
-dd5127615ad626741a1116d022cf784637ac05df
+1de57a5ce952c722f7053aeacfc6c90bc139b678
@@ -111,7 +111,7 @@ const RESERVED: &[&str] = &[
 // keywords that are keywords only in specific parse contexts
 #[doc(alias = "WEAK_KEYWORDS")]
 const CONTEXTUAL_KEYWORDS: &[&str] =
-    &["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet"];
+    &["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet", "safe"];
 // keywords we use for special macro expansions
 const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &[
     "asm",
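`safe` is added as a contextual ("weak") keyword, so it only acts as a keyword in specific positions such as extern-block items; ordinary code can keep using it as an identifier. A minimal standalone illustration (not part of the diff):

fn main() {
    // `safe` is not a reserved word, so this is still a valid binding name.
    let safe = "just an identifier";
    println!("{safe}");
}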