Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-27 18:26:19 +00:00)

Commit a52ffa7068: Rollup merge of #137921 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer` r? `@ghost`

206 changed files with 6072 additions and 4269 deletions
.github/workflows/autopublish.yaml (vendored): 2 changes

@@ -11,7 +11,7 @@ on:

jobs:
publish:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:
.github/workflows/ci.yaml (vendored): 2 changes

@@ -174,7 +174,7 @@ jobs:
- name: Install Nodejs
uses: actions/setup-node@v4
with:
node-version: 18
node-version: 22
if: needs.changes.outputs.typescript == 'true'

- name: Install xvfb
.github/workflows/fuzz.yml (vendored): 2 changes

@@ -19,7 +19,7 @@ env:

jobs:
rust:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: Rust
runs-on: ubuntu-latest
env:
.github/workflows/publish-libs.yaml (vendored): 2 changes

@@ -9,7 +9,7 @@ on:

jobs:
publish-libs:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
steps:
.github/workflows/release.yaml (vendored): 59 changes

@@ -17,12 +17,12 @@ env:
RUSTUP_MAX_RETRIES: 10
FETCH_DEPTH: 0 # pull in the tags for the version string
MACOSX_DEPLOYMENT_TARGET: 13.0
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
ZIG_VERSION: 0.13.0
ZIGBUILD_VERSION: 0.19.8

jobs:
dist:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
strategy:
matrix:
include:

@@ -36,13 +36,15 @@ jobs:
code-target: win32-arm64
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
zig_target: x86_64-unknown-linux-gnu.2.28
code-target: linux-x64
container: rockylinux:8
- os: ubuntu-latest
target: aarch64-unknown-linux-gnu
zig_target: aarch64-unknown-linux-gnu.2.28
code-target: linux-arm64
- os: ubuntu-latest
target: arm-unknown-linux-gnueabihf
zig_target: arm-unknown-linux-gnueabihf.2.28
code-target: linux-armhf
- os: macos-13
target: x86_64-apple-darwin

@@ -64,40 +66,33 @@ jobs:
with:
fetch-depth: ${{ env.FETCH_DEPTH }}

- name: Install toolchain dependencies
if: matrix.container == 'rockylinux:8'
shell: bash
run: |
dnf install -y gcc
curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y
echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
- name: Install Node.js toolchain
uses: actions/setup-node@v4
with:
node-version: 22

- name: Install Rust toolchain
run: |
rustup update --no-self-update stable
rustup target add ${{ matrix.target }}
rustup component add rust-src
rustup target add ${{ matrix.target }}

- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version: 18
- name: Install Zig toolchain
if: ${{ matrix.zig_target }}
run: |
which cargo
curl -L "https://ziglang.org/download/${ZIG_VERSION}/zig-linux-$(uname -m)-${ZIG_VERSION}.tar.xz" | sudo tar JxC /usr/local
sudo ln -s "/usr/local/zig-linux-$(uname -m)-${ZIG_VERSION}/zig" /usr/local/bin/zig
curl -L "https://github.com/rust-cross/cargo-zigbuild/releases/download/v${ZIGBUILD_VERSION}/cargo-zigbuild-v${ZIGBUILD_VERSION}.x86_64-unknown-linux-musl.tar.gz" | tar zxC ~/.cargo/bin

- name: Update apt repositories
if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get update

- name: Install AArch64 target toolchain
if: matrix.target == 'aarch64-unknown-linux-gnu'
run: sudo apt-get install gcc-aarch64-linux-gnu

- name: Install ARM target toolchain
if: matrix.target == 'arm-unknown-linux-gnueabihf'
run: sudo apt-get install gcc-arm-linux-gnueabihf

- name: Dist
- name: Dist (plain)
if: ${{ !matrix.zig_target }}
run: cargo xtask dist --client-patch-version ${{ github.run_number }}

- name: Dist (using zigbuild)
if: ${{ matrix.zig_target }}
run: RA_TARGET=${{ matrix.zig_target}} cargo xtask dist --client-patch-version ${{ github.run_number }} --zig

- run: npm ci
working-directory: editors/code

@@ -139,7 +134,7 @@ jobs:
path: ./dist

dist-x86_64-unknown-linux-musl:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: dist (x86_64-unknown-linux-musl)
runs-on: ubuntu-latest
env:

@@ -185,7 +180,7 @@ jobs:
path: ./dist

publish:
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event_name == 'workflow_dispatch' }}
name: publish
runs-on: ubuntu-latest
needs: ["dist", "dist-x86_64-unknown-linux-musl"]

@@ -193,7 +188,7 @@ jobs:
- name: Install Nodejs
uses: actions/setup-node@v4
with:
node-version: 20
node-version: 22

- run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV
if: github.ref == 'refs/heads/release'
@@ -18,6 +18,8 @@ extend-ignore-re = [
"INOUT",
"optin",
"=Pn",
# ignore `// spellchecker:off` until `// spellchecker:on`
"(?s)(#|//)\\s*spellchecker:off.*?\\n\\s*(#|//)\\s*spellchecker:on",
]

[default.extend-words]
Cargo.lock (generated): 33 changes

@@ -22,9 +22,6 @@ name = "always-assert"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
dependencies = [
"tracing",
]

[[package]]
name = "anyhow"

@@ -1507,9 +1504,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_abi"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3829c3355d1681ffeaf1450ec71edcdace6820fe2e86469d8fc1ad45e2c96460"
checksum = "4b42cccfff8091a4c3397736518774dbad619e82f8def6f70d8e46dbbe396007"
dependencies = [
"bitflags 2.7.0",
"ra-ap-rustc_hashes",

@@ -1519,18 +1516,18 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_hashes"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bd4d6d4c434bec08e02370a4f64a4985312097215a62e82d0f757f3a98e502e"
checksum = "46d8bd34ed6552c8cac1764106ef5adbeef3e5c7700e0ceb4c83a47a631894fe"
dependencies = [
"rustc-stable-hash",
]

[[package]]
name = "ra-ap-rustc_index"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bad6fc4bd7522e31096e2de5b0351144fe0684b608791ee26c842bf2da1b19ae"
checksum = "93799e4dccbbd47f8b66bc0aa42effc1b7077aaee09d8a40b86b8d659b80c7b7"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",

@@ -1538,9 +1535,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfb234e1f84b92be45276c3025bee18789e9bc95bec8789bec961e78edb01c52"
checksum = "30baa5d00f94ba437a9dcaf7ae074ebe4f367bb05a4c2835e0aa2e7af3463aac"
dependencies = [
"proc-macro2",
"quote",

@@ -1549,9 +1546,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_lexer"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a3a40bd11dc43d1cb110e730b80620cf8102f4cca8920a02b65954da0ed931f"
checksum = "3004d1d1b50afe3e1f9cdd428a282da7ffbf5f26dd8bf04af0d651d44e4873d8"
dependencies = [
"memchr",
"unicode-properties",

@@ -1560,9 +1557,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5feb877478994cb4c0c0c7a5116a352eefc0634aefc8636feb00a893fa5b7135"
checksum = "cb57e5124a64aaaf92c06130fbc1b8e1d547b5a2a96081f1f848e31c211df5d2"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",

@@ -1570,9 +1567,9 @@ dependencies = [

[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.97.0"
version = "0.98.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a76774d35934d464c4115908cde16f76a4f7e540fe1eea6b79336c556e37bdd3"
checksum = "e427c3d30e4bdff28abd6b0ef3e6f4dfab44acd9468a4954eeff8717d8df8819"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",

@@ -1940,13 +1937,13 @@ dependencies = [
name = "stdx"
version = "0.0.0"
dependencies = [
"always-assert",
"backtrace",
"crossbeam-channel",
"itertools",
"jod-thread",
"libc",
"miow",
"tracing",
"windows-sys 0.59.0",
]
Cargo.toml: 13 changes

@@ -25,7 +25,6 @@ salsa.opt-level = 3
miniz_oxide.opt-level = 3

[profile.release]
incremental = true
# Set this to 1 or 2 to get more useful backtraces in debugger.
debug = 0

@@ -86,12 +85,12 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }

ra-ap-rustc_hashes = { version = "0.97", default-features = false }
ra-ap-rustc_lexer = { version = "0.97", default-features = false }
ra-ap-rustc_parse_format = { version = "0.97", default-features = false }
ra-ap-rustc_index = { version = "0.97", default-features = false }
ra-ap-rustc_abi = { version = "0.97", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.97", default-features = false }
ra-ap-rustc_hashes = { version = "0.98", default-features = false }
ra-ap-rustc_lexer = { version = "0.98", default-features = false }
ra-ap-rustc_parse_format = { version = "0.98", default-features = false }
ra-ap-rustc_index = { version = "0.98", default-features = false }
ra-ap-rustc_abi = { version = "0.98", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.98", default-features = false }

# local crates that aren't published to crates.io. These should not have versions.
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
lz4_flex = { version = "0.11", default-features = false }

@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
rustc-hash.workspace = true

@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
arrayvec.workspace = true
@@ -5,7 +5,9 @@
//!
//! It is used like this:
//!
//! ```
//! ```ignore
//! # use hir_def::dyn_map::DynMap;
//! # use hir_def::dyn_map::Key;
//! // keys define submaps of a `DynMap`
//! const STRING_TO_U32: Key<String, u32> = Key::new();
//! const U32_TO_VEC: Key<u32, Vec<bool>> = Key::new();
@@ -883,20 +883,20 @@ pub struct UseTree {

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum UseTreeKind {
/// ```
/// ```ignore
/// use path::to::Item;
/// use path::to::Item as Renamed;
/// use path::to::Trait as _;
/// ```
Single { path: Interned<ModPath>, alias: Option<ImportAlias> },

/// ```
/// ```ignore
/// use *; // (invalid, but can occur in nested tree)
/// use path::*;
/// ```
Glob { path: Option<Interned<ModPath>> },

/// ```
/// ```ignore
/// use prefix::{self, Item, ...};
/// ```
Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },
@@ -22,7 +22,7 @@ use hir_expand::{
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
InFile, MacroCallKind, MacroFileId, MacroFileIdExt,
InFile, MacroCallKind, MacroFileId, MacroFileIdExt, MacroKind,
};
use intern::Symbol;
use itertools::Itertools;

@@ -211,7 +211,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream

if let Some(src) = src {
if let Some(file_id) = src.file_id.macro_file() {
if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
if let MacroKind::Derive
| MacroKind::DeriveBuiltIn
| MacroKind::Attr
| MacroKind::AttrBuiltIn = file_id.kind(&db)
{
let call = file_id.call_node(&db);
let mut show_spans = false;
let mut show_ctxt = false;

@@ -236,7 +240,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if let Some(macro_file) = src.file_id.macro_file() {
if macro_file.is_builtin_derive(&db) {
if let MacroKind::DeriveBuiltIn | MacroKind::Derive = macro_file.kind(&db) {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
@@ -134,7 +134,7 @@ impl DirPath {
/// So this is the case which doesn't really work I think if we try to be
/// 100% platform agnostic:
///
/// ```
/// ```ignore
/// mod a {
/// #[path="C://sad/face"]
/// mod b { mod c; }
@@ -532,16 +532,17 @@ impl Resolver {
/// Note that in Rust one name can be bound to several items:
///
/// ```
/// # #![allow(non_camel_case_types)]
/// macro_rules! t { () => (()) }
/// type t = t!();
/// const t: t = t!()
/// const t: t = t!();
/// ```
///
/// That's why we return a multimap.
///
/// The shadowing is accounted for: in
///
/// ```
/// ```ignore
/// let it = 92;
/// {
/// let it = 92;
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -101,7 +101,7 @@ fn dummy_gate_test_expand(
/// somewhat inconsistently resolve derive attributes.
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
/// ```ignore
/// #![Foo]
/// #![bar::Bar]
/// ```
@@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,

use crate::{
db::{self, ExpandDatabase},
map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt,
map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt, MacroKind,
};

/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.

@@ -276,7 +276,11 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.borrow().clone() })
}
HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
HirFileIdRepr::MacroFile(m)
if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
{
m
}
_ => return None,
};

@@ -453,7 +457,7 @@ impl<N: AstNode> InFile<N> {
}
HirFileIdRepr::MacroFile(m) => m,
};
if !file_id.is_attr_macro(db) {
if !matches!(file_id.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) {
return None;
}
@@ -416,6 +416,24 @@ impl HirFileIdExt for HirFileId {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroKind {
/// `macro_rules!` or Macros 2.0 macro.
Declarative,
/// A built-in function-like macro.
DeclarativeBuiltIn,
/// A custom derive.
Derive,
/// A builtin-in derive.
DeriveBuiltIn,
/// A procedural attribute macro.
Attr,
/// A built-in attribute macro.
AttrBuiltIn,
/// A function-like procedural macro.
ProcMacro,
}

pub trait MacroFileIdExt {
fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
@@ -427,15 +445,12 @@ pub trait MacroFileIdExt {

fn expansion_info(self, db: &dyn ExpandDatabase) -> ExpansionInfo;

fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool;
fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool;
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind;

/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool;

fn is_eager(&self, db: &dyn ExpandDatabase) -> bool;
/// Return whether this file is an attr macro
fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool;

/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].

@@ -468,18 +483,18 @@ impl MacroFileIdExt for MacroFileId {
ExpansionInfo::new(db, self)
}

fn is_custom_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive)
)
}

fn is_builtin_derive(&self, db: &dyn ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::BuiltInDerive(..)
)
fn kind(&self, db: &dyn ExpandDatabase) -> MacroKind {
match db.lookup_intern_macro_call(self.macro_call_id).def.kind {
MacroDefKind::Declarative(..) => MacroKind::Declarative,
MacroDefKind::BuiltIn(..) | MacroDefKind::BuiltInEager(..) => {
MacroKind::DeclarativeBuiltIn
}
MacroDefKind::BuiltInDerive(..) => MacroKind::DeriveBuiltIn,
MacroDefKind::ProcMacro(_, _, ProcMacroKind::CustomDerive) => MacroKind::Derive,
MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr) => MacroKind::Attr,
MacroDefKind::ProcMacro(_, _, ProcMacroKind::Bang) => MacroKind::ProcMacro,
MacroDefKind::BuiltInAttr(..) => MacroKind::AttrBuiltIn,
}
}

fn is_include_macro(&self, db: &dyn ExpandDatabase) -> bool {

@@ -507,13 +522,6 @@ impl MacroFileIdExt for MacroFileId {
}
}

fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, _, ProcMacroKind::Attr)
)
}

fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
let loc = db.lookup_intern_macro_call(self.macro_call_id);
loc.def.is_attribute_derive()
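For downstream code, the removed boolean helpers (`is_attr_macro`, `is_custom_derive`, `is_builtin_derive`) are folded into the single `MacroFileIdExt::kind` accessor shown above. A minimal migration sketch, using only the `hir_expand` items named in this diff (the helper function itself is hypothetical):

```rust
use hir_expand::{db::ExpandDatabase, MacroFileId, MacroFileIdExt, MacroKind};

/// Hypothetical helper: classify the expansion once and match on the result
/// instead of calling several boolean predicates.
fn is_attr_or_derive_expansion(db: &dyn ExpandDatabase, file_id: MacroFileId) -> bool {
    matches!(
        file_id.kind(db),
        MacroKind::Attr | MacroKind::AttrBuiltIn | MacroKind::Derive | MacroKind::DeriveBuiltIn
    )
}
```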
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -354,12 +354,43 @@ fn overflowing_add() {
fn needs_drop() {
check_number(
r#"
//- minicore: copy, sized
//- minicore: drop, manually_drop, copy, sized
use core::mem::ManuallyDrop;
extern "rust-intrinsic" {
pub fn needs_drop<T: ?Sized>() -> bool;
}
struct X;
const GOAL: bool = !needs_drop::<i32>() && needs_drop::<X>();
struct NeedsDrop;
impl Drop for NeedsDrop {
fn drop(&mut self) {}
}
enum Enum<T> {
A(T),
B(X),
}
const fn val_needs_drop<T>(_v: T) -> bool { needs_drop::<T>() }
const fn closure_needs_drop() -> bool {
let a = NeedsDrop;
let b = X;
!val_needs_drop(|| &a) && val_needs_drop(move || &a) && !val_needs_drop(move || &b)
}
const fn opaque() -> impl Sized {
|| {}
}
const fn opaque_copy() -> impl Sized + Copy {
|| {}
}
trait Everything {}
impl<T> Everything for T {}
const GOAL: bool = !needs_drop::<i32>() && !needs_drop::<X>()
&& needs_drop::<NeedsDrop>() && !needs_drop::<ManuallyDrop<NeedsDrop>>()
&& needs_drop::<[NeedsDrop; 1]>() && !needs_drop::<[NeedsDrop; 0]>()
&& needs_drop::<(X, NeedsDrop)>()
&& needs_drop::<Enum<NeedsDrop>>() && !needs_drop::<Enum<X>>()
&& closure_needs_drop()
&& !val_needs_drop(opaque()) && !val_needs_drop(opaque_copy())
&& needs_drop::<[NeedsDrop]>() && needs_drop::<dyn Everything>()
&& !needs_drop::<&dyn Everything>() && !needs_drop::<str>();
"#,
1,
);
@@ -13,6 +13,7 @@ use hir_def::{
ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId,
};
use hir_expand::name::Name;
use la_arena::ArenaMap;
use smallvec::SmallVec;
use triomphe::Arc;

@@ -20,6 +21,7 @@ use triomphe::Arc;

use crate::{
chalk_db,
consteval::ConstEvalError,
drop::DropGlue,
dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
lower::{Diagnostics, GenericDefaults, GenericPredicates},

@@ -28,7 +30,6 @@ use crate::{
Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
use hir_expand::name::Name;

#[ra_salsa::query_group(HirDatabaseStorage)]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {

@@ -305,6 +306,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
block: Option<BlockId>,
env: chalk_ir::Environment<Interner>,
) -> chalk_ir::ProgramClauses<Interner>;

#[ra_salsa::invoke(crate::drop::has_drop_glue)]
#[ra_salsa::cycle(crate::drop::has_drop_glue_recover)]
fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
}

#[test]
@@ -95,7 +95,7 @@ pub struct HirFormatter<'a> {
enum BoundsFormattingCtx {
Entered {
/// We can have recursive bounds like the following case:
/// ```rust
/// ```ignore
/// where
/// T: Foo,
/// T::FooAssoc: Baz<<T::FooAssoc as Bar>::BarAssoc> + Bar
crates/hir-ty/src/drop.rs (new file): 209 lines

@@ -0,0 +1,209 @@
//! Utilities for computing drop info about types.

use base_db::ra_salsa;
use chalk_ir::cast::Cast;
use hir_def::data::adt::StructFlags;
use hir_def::lang_item::LangItem;
use hir_def::AdtId;
use stdx::never;
use triomphe::Arc;

use crate::{
db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds,
InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind,
};
use crate::{ConcreteConst, ConstScalar, ConstValue};

fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
let module = match adt {
AdtId::EnumId(id) => db.lookup_intern_enum(id).container,
AdtId::StructId(id) => db.lookup_intern_struct(id).container,
AdtId::UnionId(id) => db.lookup_intern_union(id).container,
};
let Some(drop_trait) =
db.lang_item(module.krate(), LangItem::Drop).and_then(|it| it.as_trait())
else {
return false;
};
let impls = match module.containing_block() {
Some(block) => match db.trait_impls_in_block(block) {
Some(it) => it,
None => return false,
},
None => db.trait_impls_in_crate(module.krate()),
};
let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some();
result
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum DropGlue {
// Order of variants is important.
None,
/// May have a drop glue if some type parameter has it.
///
/// For the compiler this is considered as a positive result, IDE distinguishes this from "yes".
DependOnParams,
HasDropGlue,
}

pub(crate) fn has_drop_glue(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue {
match ty.kind(Interner) {
TyKind::Adt(adt, subst) => {
if has_destructor(db, adt.0) {
return DropGlue::HasDropGlue;
}
match adt.0 {
AdtId::StructId(id) => {
if db.struct_data(id).flags.contains(StructFlags::IS_MANUALLY_DROP) {
return DropGlue::None;
}
db.field_types(id.into())
.iter()
.map(|(_, field_ty)| {
db.has_drop_glue(
field_ty.clone().substitute(Interner, subst),
env.clone(),
)
})
.max()
.unwrap_or(DropGlue::None)
}
// Unions cannot have fields with destructors.
AdtId::UnionId(_) => DropGlue::None,
AdtId::EnumId(id) => db
.enum_data(id)
.variants
.iter()
.map(|&(variant, _)| {
db.field_types(variant.into())
.iter()
.map(|(_, field_ty)| {
db.has_drop_glue(
field_ty.clone().substitute(Interner, subst),
env.clone(),
)
})
.max()
.unwrap_or(DropGlue::None)
})
.max()
.unwrap_or(DropGlue::None),
}
}
TyKind::Tuple(_, subst) => subst
.iter(Interner)
.map(|ty| ty.assert_ty_ref(Interner))
.map(|ty| db.has_drop_glue(ty.clone(), env.clone()))
.max()
.unwrap_or(DropGlue::None),
TyKind::Array(ty, len) => {
if let ConstValue::Concrete(ConcreteConst { interned: ConstScalar::Bytes(len, _) }) =
&len.data(Interner).value
{
match (&**len).try_into() {
Ok(len) => {
let len = usize::from_le_bytes(len);
if len == 0 {
// Arrays of size 0 don't have drop glue.
return DropGlue::None;
}
}
Err(_) => {
never!("const array size with non-usize len");
}
}
}
db.has_drop_glue(ty.clone(), env)
}
TyKind::Slice(ty) => db.has_drop_glue(ty.clone(), env),
TyKind::Closure(closure_id, subst) => {
let owner = db.lookup_intern_closure((*closure_id).into()).0;
let infer = db.infer(owner);
let (captures, _) = infer.closure_info(closure_id);
let env = db.trait_environment_for_body(owner);
captures
.iter()
.map(|capture| db.has_drop_glue(capture.ty(subst), env.clone()))
.max()
.unwrap_or(DropGlue::None)
}
// FIXME: Handle coroutines.
TyKind::Coroutine(..) | TyKind::CoroutineWitness(..) => DropGlue::None,
TyKind::Ref(..)
| TyKind::Raw(..)
| TyKind::FnDef(..)
| TyKind::Str
| TyKind::Never
| TyKind::Scalar(_)
| TyKind::Function(_)
| TyKind::Foreign(_)
| TyKind::Error => DropGlue::None,
TyKind::Dyn(_) => DropGlue::HasDropGlue,
TyKind::AssociatedType(assoc_type_id, subst) => projection_has_drop_glue(
db,
env,
ProjectionTy { associated_ty_id: *assoc_type_id, substitution: subst.clone() },
ty,
),
TyKind::Alias(AliasTy::Projection(projection)) => {
projection_has_drop_glue(db, env, projection.clone(), ty)
}
TyKind::OpaqueType(..) | TyKind::Alias(AliasTy::Opaque(_)) => {
if is_copy(db, ty, env) {
DropGlue::None
} else {
DropGlue::HasDropGlue
}
}
TyKind::Placeholder(_) | TyKind::BoundVar(_) => {
if is_copy(db, ty, env) {
DropGlue::None
} else {
DropGlue::DependOnParams
}
}
TyKind::InferenceVar(..) => unreachable!("inference vars shouldn't exist out of inference"),
}
}

fn projection_has_drop_glue(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
projection: ProjectionTy,
ty: Ty,
) -> DropGlue {
let normalized = db.normalize_projection(projection, env.clone());
match normalized.kind(Interner) {
TyKind::Alias(AliasTy::Projection(_)) | TyKind::AssociatedType(..) => {
if is_copy(db, ty, env) {
DropGlue::None
} else {
DropGlue::DependOnParams
}
}
_ => db.has_drop_glue(normalized, env),
}
}

fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
let Some(copy_trait) = db.lang_item(env.krate, LangItem::Copy).and_then(|it| it.as_trait())
else {
return false;
};
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build();
let goal = Canonical {
value: InEnvironment::new(&env.env, trait_ref.cast(Interner)),
binders: CanonicalVarKinds::empty(Interner),
};
db.trait_solve(env.krate, env.block, goal).is_some()
}

pub(crate) fn has_drop_glue_recover(
_db: &dyn HirDatabase,
_cycle: &ra_salsa::Cycle,
_ty: &Ty,
_env: &Arc<TraitEnvironment>,
) -> DropGlue {
DropGlue::None
}
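`DropGlue` derives `Ord` with `None < DependOnParams < HasDropGlue`, so aggregates combine the answers for their parts with `.max()`. A minimal sketch of that folding pattern, written as if it lived inside `hir-ty` (the function name and the `component_tys` slice are illustrative, not part of the diff):

```rust
use triomphe::Arc;

use crate::{db::HirDatabase, DropGlue, TraitEnvironment, Ty};

/// Illustrative only: the drop glue of a product type is the worst case over
/// its components, which is how `drop.rs` folds struct fields, enum variants,
/// tuples and closure captures.
fn aggregate_drop_glue(
    db: &dyn HirDatabase,
    env: Arc<TraitEnvironment>,
    component_tys: &[Ty],
) -> DropGlue {
    component_tys
        .iter()
        .map(|ty| db.has_drop_glue(ty.clone(), env.clone()))
        .max()
        .unwrap_or(DropGlue::None)
}
```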
@@ -335,7 +335,7 @@ impl Default for InternedStandardTypes {
/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
/// represented by:
///
/// ```
/// ```ignore
/// Deref(None) -> [i32; 4],
/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
/// Unsize -> &[i32],

@@ -481,9 +481,10 @@ pub struct InferenceResult {
/// or pattern can have multiple binding modes. For example:
/// ```
/// fn foo(mut slice: &[u32]) -> usize {
/// slice = match slice {
/// [0, rest @ ..] | rest => rest,
/// };
/// slice = match slice {
/// [0, rest @ ..] | rest => rest,
/// };
/// 0
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
@@ -30,11 +30,13 @@ extern crate ra_ap_rustc_hashes as rustc_hashes;
mod builder;
mod chalk_db;
mod chalk_ext;
mod drop;
mod infer;
mod inhabitedness;
mod interner;
mod lower;
mod mapping;
mod target_feature;
mod tls;
mod utils;

@@ -87,6 +89,7 @@ use crate::{
pub use autoderef::autoderef;
pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use drop::DropGlue;
pub use infer::{
cast::CastError,
closure::{CaptureKind, CapturedItem},

@@ -105,10 +108,9 @@ pub use mapping::{
to_foreign_def_id, to_placeholder_idx,
};
pub use method_resolution::check_orphan_rules;
pub use target_feature::TargetFeatures;
pub use traits::TraitEnvironment;
pub use utils::{
all_super_traits, direct_super_traits, is_fn_unsafe_to_call, TargetFeatures, Unsafety,
};
pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call, Unsafety};
pub use variance::Variance;

pub use chalk_ir::{
@@ -11,6 +11,7 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::{sym, Symbol};
use stdx::never;

use crate::{
error_lifetime,

@@ -20,6 +21,7 @@ use crate::{
LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, Mutability, Result, Substitution,
Ty, TyBuilder, TyExt,
},
DropGlue,
};

mod simd;

@@ -853,7 +855,14 @@ impl Evaluator<'_> {
"size_of generic arg is not provided".into(),
));
};
let result = !ty.clone().is_copy(self.db, locals.body.owner);
let result = match self.db.has_drop_glue(ty.clone(), self.trait_env.clone()) {
DropGlue::HasDropGlue => true,
DropGlue::None => false,
DropGlue::DependOnParams => {
never!("should be fully monomorphized now");
true
}
};
destination.write_from_bytes(self, &[u8::from(result)])
}
"ptr_guaranteed_cmp" => {
crates/hir-ty/src/target_feature.rs (new file): 261 lines

@@ -0,0 +1,261 @@
//! Stuff for handling `#[target_feature]` (needed for unsafe check).

use std::sync::LazyLock;

use hir_def::attr::Attrs;
use hir_def::tt;
use intern::{sym, Symbol};
use rustc_hash::{FxHashMap, FxHashSet};

#[derive(Debug, Default)]
pub struct TargetFeatures {
pub(crate) enabled: FxHashSet<Symbol>,
}

impl TargetFeatures {
pub fn from_attrs(attrs: &Attrs) -> Self {
let mut result = TargetFeatures::from_attrs_no_implications(attrs);
result.expand_implications();
result
}

fn expand_implications(&mut self) {
let all_implications = LazyLock::force(&TARGET_FEATURE_IMPLICATIONS);
let mut queue = self.enabled.iter().cloned().collect::<Vec<_>>();
while let Some(feature) = queue.pop() {
if let Some(implications) = all_implications.get(&feature) {
for implication in implications {
if self.enabled.insert(implication.clone()) {
queue.push(implication.clone());
}
}
}
}
}

/// Retrieves the target features from the attributes, and does not expand the target features implied by them.
pub(crate) fn from_attrs_no_implications(attrs: &Attrs) -> Self {
let enabled = attrs
.by_key(&sym::target_feature)
.tt_values()
.filter_map(|tt| {
match tt.token_trees().flat_tokens() {
[
tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
] if enable_ident.sym == sym::enable => Some(features),
_ => None,
}
})
.flat_map(|features| features.as_str().split(',').map(Symbol::intern))
.collect();
Self { enabled }
}
}

// List of the target features each target feature implies.
// Ideally we'd depend on rustc for this, but rustc_target doesn't compile on stable,
// and t-compiler prefers for it to stay this way.

static TARGET_FEATURE_IMPLICATIONS: LazyLock<FxHashMap<Symbol, Box<[Symbol]>>> =
LazyLock::new(|| {
let mut result = FxHashMap::<Symbol, FxHashSet<Symbol>>::default();
for &(feature_str, implications) in TARGET_FEATURE_IMPLICATIONS_RAW {
let feature = Symbol::intern(feature_str);
let implications = implications.iter().copied().map(Symbol::intern);
// Some target features appear in two archs, e.g. Arm and x86.
// Sometimes they contain different implications, e.g. `aes`.
// We should probably choose by the active arch, but for now just merge them.
result.entry(feature).or_default().extend(implications);
}
let mut result = result
.into_iter()
.map(|(feature, implications)| (feature, Box::from_iter(implications)))
.collect::<FxHashMap<_, _>>();
result.shrink_to_fit();
result
});

// spellchecker:off
const TARGET_FEATURE_IMPLICATIONS_RAW: &[(&str, &[&str])] = &[
// Arm
("aes", &["neon"]),
("dotprod", &["neon"]),
("fp-armv8", &["vfp4"]),
("fp16", &["neon"]),
("i8mm", &["neon"]),
("neon", &["vfp3"]),
("sha2", &["neon"]),
("v6", &["v5te"]),
("v6k", &["v6"]),
("v6t2", &["v6k", "thumb2"]),
("v7", &["v6t2"]),
("v8", &["v7"]),
("vfp3", &["vfp2", "d32"]),
("vfp4", &["vfp3"]),
// Aarch64
("aes", &["neon"]),
("dotprod", &["neon"]),
("dpb2", &["dpb"]),
("f32mm", &["sve"]),
("f64mm", &["sve"]),
("fcma", &["neon"]),
("fhm", &["fp16"]),
("fp16", &["neon"]),
("fp8", &["faminmax", "lut", "bf16"]),
("fp8dot2", &["fp8dot4"]),
("fp8dot4", &["fp8fma"]),
("fp8fma", &["fp8"]),
("jsconv", &["neon"]),
("lse128", &["lse"]),
("rcpc2", &["rcpc"]),
("rcpc3", &["rcpc2"]),
("rdm", &["neon"]),
("sha2", &["neon"]),
("sha3", &["sha2"]),
("sm4", &["neon"]),
("sme", &["bf16"]),
("sme-b16b16", &["bf16", "sme2", "sve-b16b16"]),
("sme-f16f16", &["sme2"]),
("sme-f64f64", &["sme"]),
("sme-f8f16", &["sme-f8f32"]),
("sme-f8f32", &["sme2", "fp8"]),
("sme-fa64", &["sme", "sve2"]),
("sme-i16i64", &["sme"]),
("sme2", &["sme"]),
("sme2p1", &["sme2"]),
("ssve-fp8dot2", &["ssve-fp8dot4"]),
("ssve-fp8dot4", &["ssve-fp8fma"]),
("ssve-fp8fma", &["sme2", "fp8"]),
("sve", &["neon"]),
("sve-b16b16", &["bf16"]),
("sve2", &["sve"]),
("sve2-aes", &["sve2", "aes"]),
("sve2-bitperm", &["sve2"]),
("sve2-sha3", &["sve2", "sha3"]),
("sve2-sm4", &["sve2", "sm4"]),
("sve2p1", &["sve2"]),
("v8.1a", &["crc", "lse", "rdm", "pan", "lor", "vh"]),
("v8.2a", &["v8.1a", "ras", "dpb"]),
("v8.3a", &["v8.2a", "rcpc", "paca", "pacg", "jsconv"]),
("v8.4a", &["v8.3a", "dotprod", "dit", "flagm"]),
("v8.5a", &["v8.4a", "ssbs", "sb", "dpb2", "bti"]),
("v8.6a", &["v8.5a", "bf16", "i8mm"]),
("v8.7a", &["v8.6a", "wfxt"]),
("v8.8a", &["v8.7a", "hbc", "mops"]),
("v8.9a", &["v8.8a", "cssc"]),
("v9.1a", &["v9a", "v8.6a"]),
("v9.2a", &["v9.1a", "v8.7a"]),
("v9.3a", &["v9.2a", "v8.8a"]),
("v9.4a", &["v9.3a", "v8.9a"]),
("v9.5a", &["v9.4a"]),
("v9a", &["v8.5a", "sve2"]),
// x86
("aes", &["sse2"]),
("amx-bf16", &["amx-tile"]),
("amx-complex", &["amx-tile"]),
("amx-fp16", &["amx-tile"]),
("amx-int8", &["amx-tile"]),
("avx", &["sse4.2"]),
("avx2", &["avx"]),
("avx512bf16", &["avx512bw"]),
("avx512bitalg", &["avx512bw"]),
("avx512bw", &["avx512f"]),
("avx512cd", &["avx512f"]),
("avx512dq", &["avx512f"]),
("avx512f", &["avx2", "fma", "f16c"]),
("avx512fp16", &["avx512bw", "avx512vl", "avx512dq"]),
("avx512ifma", &["avx512f"]),
("avx512vbmi", &["avx512bw"]),
("avx512vbmi2", &["avx512bw"]),
("avx512vl", &["avx512f"]),
("avx512vnni", &["avx512f"]),
("avx512vp2intersect", &["avx512f"]),
("avx512vpopcntdq", &["avx512f"]),
("avxifma", &["avx2"]),
("avxneconvert", &["avx2"]),
("avxvnni", &["avx2"]),
("avxvnniint16", &["avx2"]),
("avxvnniint8", &["avx2"]),
("f16c", &["avx"]),
("fma", &["avx"]),
("gfni", &["sse2"]),
("kl", &["sse2"]),
("pclmulqdq", &["sse2"]),
("sha", &["sse2"]),
("sha512", &["avx2"]),
("sm3", &["avx"]),
("sm4", &["avx2"]),
("sse2", &["sse"]),
("sse3", &["sse2"]),
("sse4.1", &["ssse3"]),
("sse4.2", &["sse4.1"]),
("sse4a", &["sse3"]),
("ssse3", &["sse3"]),
("vaes", &["avx2", "aes"]),
("vpclmulqdq", &["avx", "pclmulqdq"]),
("widekl", &["kl"]),
("xop", &[/*"fma4", */ "avx", "sse4a"]),
("xsavec", &["xsave"]),
("xsaveopt", &["xsave"]),
("xsaves", &["xsave"]),
// Hexagon
("hvx-length128b", &["hvx"]),
// PowerPC
("power10-vector", &["power9-vector"]),
("power8-altivec", &["altivec"]),
("power8-crypto", &["power8-altivec"]),
("power8-vector", &["vsx", "power8-altivec"]),
("power9-altivec", &["power8-altivec"]),
("power9-vector", &["power8-vector", "power9-altivec"]),
("vsx", &["altivec"]),
// MIPS
// RISC-V
("a", &["zaamo", "zalrsc"]),
("d", &["f"]),
("zabha", &["zaamo"]),
("zdinx", &["zfinx"]),
("zfh", &["zfhmin"]),
("zfhmin", &["f"]),
("zhinx", &["zhinxmin"]),
("zhinxmin", &["zfinx"]),
("zk", &["zkn", "zkr", "zkt"]),
("zkn", &["zbkb", "zbkc", "zbkx", "zkne", "zknd", "zknh"]),
("zks", &["zbkb", "zbkc", "zbkx", "zksed", "zksh"]),
// WASM
("relaxed-simd", &["simd128"]),
// BPF
("alu32", &[]),
// CSKY
("10e60", &["7e10"]),
("2e3", &["e2"]),
("3e3r2", &["3e3r1", "doloop"]),
("3e3r3", &["doloop"]),
("3e7", &["2e3"]),
("7e10", &["3e7"]),
("e1", &["elrw"]),
("e2", &["e2"]),
("mp", &["2e3"]),
("mp1e2", &["3e7"]),
// LoongArch
("d", &["f"]),
("lasx", &["lsx"]),
("lsx", &["d"]),
// IBM Z
("nnp-assist", &["vector"]),
("vector-enhancements-1", &["vector"]),
("vector-enhancements-2", &["vector-enhancements-1"]),
("vector-packed-decimal", &["vector"]),
("vector-packed-decimal-enhancement", &["vector-packed-decimal"]),
("vector-packed-decimal-enhancement-2", &["vector-packed-decimal-enhancement"]),
// SPARC
// m68k
("isa-68010", &["isa-68000"]),
("isa-68020", &["isa-68010"]),
("isa-68030", &["isa-68020"]),
("isa-68040", &["isa-68030", "isa-68882"]),
("isa-68060", &["isa-68040"]),
("isa-68882", &["isa-68881"]),
];
// spellchecker:on
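`from_attrs` expands implied features (for example `avx2` pulls in `avx`, which pulls in `sse4.2`, and so on), while `from_attrs_no_implications` keeps only the features spelled out in the attribute. The unsafe check in `utils.rs` below compares the two directions; a minimal sketch of that rule, assuming it runs inside `hir-ty` where the `enabled` set is visible:

```rust
use crate::TargetFeatures;

/// Sketch of the RFC 2396 rule: calling a `#[target_feature]` function outside
/// an `unsafe` block is allowed only if the caller (with implied features
/// expanded) already enables everything the callee declares (taken literally,
/// without implications).
fn call_allowed_without_unsafe(caller: &TargetFeatures, callee_declared: &TargetFeatures) -> bool {
    caller.enabled.is_superset(&callee_declared.enabled)
}
```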
@@ -9,18 +9,16 @@ use chalk_ir::{
DebruijnIndex,
};
use hir_def::{
attr::Attrs,
db::DefDatabase,
generics::{WherePredicate, WherePredicateTypeTarget},
lang_item::LangItem,
resolver::{HasResolver, TypeNs},
tt,
type_ref::{TraitBoundModifier, TypeRef},
EnumId, EnumVariantId, FunctionId, Lookup, OpaqueInternableThing, TraitId, TypeAliasId,
TypeOrConstParamId,
};
use hir_expand::name::Name;
use intern::{sym, Symbol};
use intern::sym;
use rustc_abi::TargetDataLayout;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};

@@ -32,8 +30,8 @@ use crate::{
db::HirDatabase,
layout::{Layout, TagEncoding},
mir::pad16,
ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TraitRef, TraitRefExt,
Ty, WhereClause,
ChalkTraitId, Const, ConstScalar, GenericArg, Interner, Substitution, TargetFeatures, TraitRef,
TraitRefExt, Ty, WhereClause,
};

pub(crate) fn fn_traits(

@@ -267,32 +265,6 @@ impl<'a> ClosureSubst<'a> {
}
}

#[derive(Debug, Default)]
pub struct TargetFeatures {
enabled: FxHashSet<Symbol>,
}

impl TargetFeatures {
pub fn from_attrs(attrs: &Attrs) -> Self {
let enabled = attrs
.by_key(&sym::target_feature)
.tt_values()
.filter_map(|tt| {
match tt.token_trees().flat_tokens() {
[
tt::TokenTree::Leaf(tt::Leaf::Ident(enable_ident)),
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. })),
tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { kind: tt::LitKind::Str, symbol: features, .. })),
] if enable_ident.sym == sym::enable => Some(features),
_ => None,
}
})
.flat_map(|features| features.as_str().split(',').map(Symbol::intern))
.collect();
Self { enabled }
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Unsafety {
Safe,

@@ -314,7 +286,8 @@ pub fn is_fn_unsafe_to_call(

if data.has_target_feature() {
// RFC 2396 <https://rust-lang.github.io/rfcs/2396-target-feature-1.1.html>.
let callee_target_features = TargetFeatures::from_attrs(&db.attrs(func.into()));
let callee_target_features =
TargetFeatures::from_attrs_no_implications(&db.attrs(func.into()));
if !caller_target_features.enabled.is_superset(&callee_target_features.enabled) {
return Unsafety::Unsafe;
}
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
rustc-hash.workspace = true
@@ -142,7 +142,7 @@ pub use {
name::Name,
prettify_macro_expansion,
proc_macro::{ProcMacros, ProcMacrosBuilder},
tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt, MacroKind,
},
hir_ty::{
consteval::ConstEvalError,

@@ -152,7 +152,7 @@ pub use {
layout::LayoutError,
method_resolution::TyFingerprint,
mir::{MirEvalError, MirLowerError},
CastError, FnAbi, PointerCast, Safety, Variance,
CastError, DropGlue, FnAbi, PointerCast, Safety, Variance,
},
// FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner},

@@ -699,7 +699,10 @@ impl Module {
let source_map = tree_source_maps.impl_(loc.id.value).item();
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
if file_id.macro_file().is_some_and(|it| it.is_builtin_derive(db.upcast())) {
if file_id
.macro_file()
.is_some_and(|it| it.kind(db.upcast()) == MacroKind::DeriveBuiltIn)
{
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;

@@ -1391,6 +1394,10 @@ impl Struct {
Type::from_def(db, self.id)
}

pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
Type::from_def_placeholders(db, self.id)
}

pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
Type::from_value_def(db, self.id)
}

@@ -1436,6 +1443,10 @@ impl Union {
Type::from_def(db, self.id)
}

pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
Type::from_def_placeholders(db, self.id)
}

pub fn constructor_ty(self, db: &dyn HirDatabase) -> Type {
Type::from_value_def(db, self.id)
}

@@ -1490,6 +1501,10 @@ impl Enum {
Type::from_def(db, self.id)
}

pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
Type::from_def_placeholders(db, self.id)
}

/// The type of the enum variant bodies.
pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
Type::new_for_crate(

@@ -2929,6 +2944,10 @@ impl TypeAlias {
Type::from_def(db, self.id)
}

pub fn ty_placeholders(self, db: &dyn HirDatabase) -> Type {
Type::from_def_placeholders(db, self.id)
}

pub fn name(self, db: &dyn HirDatabase) -> Name {
db.type_alias_data(self.id).name.clone()
}

@@ -3033,20 +3052,6 @@ impl BuiltinType {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroKind {
/// `macro_rules!` or Macros 2.0 macro.
Declarative,
/// A built-in or custom derive.
Derive,
/// A built-in function-like macro.
BuiltIn,
/// A procedural attribute macro.
Attr,
/// A function-like procedural macro.
ProcMacro,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro {
pub(crate) id: MacroId,

@@ -3077,15 +3082,19 @@ impl Macro {
match self.id {
MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
MacroExpander::Declarative => MacroKind::Declarative,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
MacroKind::DeclarativeBuiltIn
}
MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
},
MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
MacroExpander::Declarative => MacroKind::Declarative,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => {
MacroKind::DeclarativeBuiltIn
}
MacroExpander::BuiltInAttr(_) => MacroKind::AttrBuiltIn,
MacroExpander::BuiltInDerive(_) => MacroKind::DeriveBuiltIn,
},
MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
ProcMacroKind::CustomDerive => MacroKind::Derive,

@@ -3096,10 +3105,10 @@ impl Macro {
}

pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
match self.kind(db) {
MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
MacroKind::Attr | MacroKind::Derive => false,
}
matches!(
self.kind(db),
MacroKind::Declarative | MacroKind::DeclarativeBuiltIn | MacroKind::ProcMacro
)
}

pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {

@@ -3139,11 +3148,11 @@ impl Macro {
}

pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
matches!(self.kind(db), MacroKind::Attr)
matches!(self.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn)
}

pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
matches!(self.kind(db), MacroKind::Derive)
matches!(self.kind(db), MacroKind::Derive | MacroKind::DeriveBuiltIn)
}
}

@@ -4708,6 +4717,19 @@ impl Type {
Type::new(db, def, ty.substitute(Interner, &substs))
}

fn from_def_placeholders(db: &dyn HirDatabase, def: impl Into<TyDefId> + HasResolver) -> Type {
let ty = db.ty(def.into());
let substs = TyBuilder::placeholder_subst(
db,
match def.into() {
TyDefId::AdtId(it) => GenericDefId::AdtId(it),
TyDefId::TypeAliasId(it) => GenericDefId::TypeAliasId(it),
TyDefId::BuiltinType(_) => return Type::new(db, def, ty.skip_binders().clone()),
},
);
Type::new(db, def, ty.substitute(Interner, &substs))
}

fn from_value_def(db: &dyn HirDatabase, def: impl Into<ValueTyDefId> + HasResolver) -> Type {
let Some(ty) = db.value_ty(def.into()) else {
return Type::new(db, def, TyKind::Error.intern(Interner));

@@ -5737,6 +5759,10 @@ impl Type {
db.layout_of_ty(self.ty.clone(), self.env.clone())
.map(|layout| Layout(layout, db.target_data_layout(self.env.krate).unwrap()))
}

pub fn drop_glue(&self, db: &dyn HirDatabase) -> DropGlue {
db.has_drop_glue(self.ty.clone(), self.env.clone())
}
}

#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
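With `DropGlue` re-exported from `hir` and the new `Type::drop_glue` method, IDE layers can query drop information directly. A minimal usage sketch (the function and the strings it returns are illustrative, not part of the diff):

```rust
use hir::{db::HirDatabase, DropGlue, Type};

/// Illustrative: turn the drop-glue answer for a resolved type into a label
/// that a hover or an assist could show.
fn drop_glue_label(db: &dyn HirDatabase, ty: &Type) -> &'static str {
    match ty.drop_glue(db) {
        DropGlue::None => "needs no drop glue",
        DropGlue::DependOnParams => "drop glue depends on generic parameters",
        DropGlue::HasDropGlue => "has drop glue",
    }
}
```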
@@ -508,9 +508,7 @@ impl<'db> SemanticsImpl<'db> {
})
}

pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
let file_id = self.find_file(adt.syntax()).file_id;
let adt = InFile::new(file_id, adt);
pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
self.with_ctx(|ctx| ctx.has_derives(adt))
}

@@ -551,10 +549,8 @@ impl<'db> SemanticsImpl<'db> {
res.is_empty().not().then_some(res)
}

pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item);
self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
}

/// Expand the macro call with a different token tree, mapping the `token_to_map` down into the

@@ -1526,8 +1522,13 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}

// FIXME: Replace this with `resolve_macro_call2`
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
self.resolve_macro_call2(macro_call)
}

pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
self.with_ctx(|ctx| {
ctx.macro_call_to_macro_call(macro_call)
.and_then(|call| macro_call_to_macro_id(ctx, call))

@@ -1538,8 +1539,8 @@ impl<'db> SemanticsImpl<'db> {
})
}

pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
self.resolve_macro_call(macro_call)
pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
self.resolve_macro_call2(macro_call)
.is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
}
|
|
@@ -5,7 +5,7 @@
//!
//! This module solves the following problem:
//!
//! Given a piece of syntax, find the corresponding semantic definition (def).
//! > Given a piece of syntax, find the corresponding semantic definition (def).
//!
//! This problem is a part of more-or-less every IDE feature implemented. Every
//! IDE functionality (like goto to definition), conceptually starts with a
@@ -40,7 +40,7 @@ fn mod_item_path_str(
/// Type tree shows how can we get from set of types to some type.
///
/// Consider the following code as an example
/// ```
/// ```ignore
/// fn foo(x: i32, y: bool) -> Option<i32> { None }
/// fn bar() {
/// let a = 1;
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -52,6 +52,10 @@ pub(crate) struct AssistContext<'a> {
frange: FileRange,
trimmed_range: TextRange,
source_file: SourceFile,
// We cache this here to speed up things slightly
token_at_offset: TokenAtOffset<SyntaxToken>,
// We cache this here to speed up things slightly
covering_element: SyntaxElement,
}

impl<'a> AssistContext<'a> {

@@ -78,8 +82,18 @@ impl<'a> AssistContext<'a> {
// Selection solely consists of whitespace so just fall back to the original
_ => frange.range,
};
let token_at_offset = source_file.syntax().token_at_offset(frange.range.start());
let covering_element = source_file.syntax().covering_element(trimmed_range);

AssistContext { config, sema, frange, source_file, trimmed_range }
AssistContext {
config,
sema,
frange,
source_file,
trimmed_range,
token_at_offset,
covering_element,
}
}

pub(crate) fn db(&self) -> &RootDatabase {

@@ -114,7 +128,7 @@ impl<'a> AssistContext<'a> {
}

pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
self.source_file.syntax().token_at_offset(self.offset())
self.token_at_offset.clone()
}
pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
self.token_at_offset().find(|it| it.kind() == kind)

@@ -136,7 +150,7 @@ impl<'a> AssistContext<'a> {
}
/// Returns the element covered by the selection range, this excludes trailing whitespace in the selection.
pub(crate) fn covering_element(&self) -> SyntaxElement {
self.source_file.syntax().covering_element(self.selection_trimmed())
self.covering_element.clone()
}
}
@@ -76,6 +76,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)

let cfg = ctx.config.import_path_config();

let make = SyntaxFactory::new();

let module = ctx.sema.scope(expr.syntax())?.module();
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>,

@@ -93,7 +95,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
build_pat(ctx, module, variant, cfg)?,
build_pat(ctx, &make, module, variant, cfg)?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})

@@ -144,10 +146,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns =
variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx, &make, module, variant, cfg));

(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
(ast::Pat::from(make.tuple_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
(

@@ -176,9 +179,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns =
variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx, &make, module, variant, cfg));

(ast::Pat::from(make.slice_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
(

@@ -203,8 +208,6 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
"Fill match arms",
ctx.sema.original_range(match_expr.syntax()).range,
|builder| {
let make = SyntaxFactory::new();

// having any hidden variants means that we need a catch-all arm
needs_catch_all_arm |= has_hidden_variants;

@@ -243,7 +246,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)

if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
let arm = make.match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo());
let arm = make.match_arm(make.wildcard_pat().into(), None, make::ext::expr_todo());
arms.push(arm);
}

@@ -290,7 +293,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}

editor.add_mappings(make.finish_with_mappings());
editor.add_mappings(make.take());
builder.add_file_edits(ctx.file_id(), editor);
},
)

@@ -445,6 +448,7 @@ fn resolve_array_of_enum_def(

fn build_pat(
ctx: &AssistContext<'_>,
make: &SyntaxFactory,
module: hir::Module,
var: ExtendedVariant,
cfg: ImportPathConfig,

@@ -455,31 +459,32 @@ fn build_pat(
let edition = module.krate().edition(db);
let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
let fields = var.fields(db);
let pat = match var.kind(db) {
let pat: ast::Pat = match var.kind(db) {
hir::StructKind::Tuple => {
let mut name_generator = suggest_name::NameGenerator::new();
let pats = fields.into_iter().map(|f| {
let name = name_generator.for_type(&f.ty(db), db, edition);
match name {
Some(name) => make::ext::simple_ident_pat(make::name(&name)).into(),
None => make::wildcard_pat().into(),
Some(name) => make::ext::simple_ident_pat(make.name(&name)).into(),
None => make.wildcard_pat().into(),
}
});
make::tuple_struct_pat(path, pats).into()
make.tuple_struct_pat(path, pats).into()
}
hir::StructKind::Record => {
let pats = fields
let fields = fields
.into_iter()
.map(|f| make::name(f.name(db).as_str()))
.map(|name| make::ext::simple_ident_pat(name).into());
make::record_pat(path, pats).into()
.map(|f| make.name_ref(f.name(db).as_str()))
.map(|name_ref| make.record_pat_field_shorthand(name_ref));
let fields = make.record_pat_field_list(fields, None);
make.record_pat_with_fields(path, fields).into()
}
hir::StructKind::Unit => make::path_pat(path),
hir::StructKind::Unit => make.path_pat(path),
};
Some(pat)
}
ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
ExtendedVariant::True => Some(ast::Pat::from(make.literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make.literal_pat("false"))),
}
}
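As a rough, hand-written illustration of what this assist produces (independent of the `SyntaxFactory` plumbing in the diff above) — the enum and function here are invented for the example, and the arm bodies are filled in by hand so the snippet compiles:

```rust
// Before the assist the match has no arms; "Fill match arms" inserts one arm per
// variant (with todo!() bodies in practice, plus a `_` arm when hidden variants
// force a catch-all).
enum Shape {
    Circle,
    Square,
    Triangle,
}

fn describe(shape: Shape) -> &'static str {
    match shape {
        Shape::Circle => "circle",
        Shape::Square => "square",
        Shape::Triangle => "triangle",
    }
}

fn main() {
    println!("{}", describe(Shape::Square));
}
```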
@@ -3,12 +3,19 @@ use std::collections::VecDeque;
use ide_db::{
assists::GroupLabel,
famous_defs::FamousDefs,
source_change::SourceChangeBuilder,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use syntax::{
ast::{self, make, AstNode, Expr::BinExpr, HasArgList},
ted, SyntaxKind, T,
ast::{
self,
prec::{precedence, ExprPrecedence},
syntax_factory::SyntaxFactory,
AstNode,
Expr::BinExpr,
HasArgList,
},
syntax_editor::{Position, SyntaxEditor},
SyntaxKind, T,
};

use crate::{utils::invert_boolean_expression, AssistContext, AssistId, AssistKind, Assists};

@@ -52,53 +59,60 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
}

let op = bin_expr.op_kind()?;
let inv_token = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => SyntaxKind::PIPE2,
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => SyntaxKind::AMP2,
let (inv_token, prec) = match op {
ast::BinaryOp::LogicOp(ast::LogicOp::And) => (SyntaxKind::PIPE2, ExprPrecedence::LOr),
ast::BinaryOp::LogicOp(ast::LogicOp::Or) => (SyntaxKind::AMP2, ExprPrecedence::LAnd),
_ => return None,
};

let demorganed = bin_expr.clone_subtree().clone_for_update();
let make = SyntaxFactory::new();

let demorganed = bin_expr.clone_subtree();
let mut editor = SyntaxEditor::new(demorganed.syntax().clone());
editor.replace(demorganed.op_token()?, make.token(inv_token));

ted::replace(demorganed.op_token()?, ast::make::token(inv_token));
let mut exprs = VecDeque::from([
(bin_expr.lhs()?, demorganed.lhs()?),
(bin_expr.rhs()?, demorganed.rhs()?),
(bin_expr.lhs()?, demorganed.lhs()?, prec),
(bin_expr.rhs()?, demorganed.rhs()?, prec),
]);

while let Some((expr, dm)) = exprs.pop_front() {
while let Some((expr, demorganed, prec)) = exprs.pop_front() {
if let BinExpr(bin_expr) = &expr {
if let BinExpr(cbin_expr) = &dm {
if let BinExpr(cbin_expr) = &demorganed {
if op == bin_expr.op_kind()? {
ted::replace(cbin_expr.op_token()?, ast::make::token(inv_token));
exprs.push_back((bin_expr.lhs()?, cbin_expr.lhs()?));
exprs.push_back((bin_expr.rhs()?, cbin_expr.rhs()?));
editor.replace(cbin_expr.op_token()?, make.token(inv_token));
exprs.push_back((bin_expr.lhs()?, cbin_expr.lhs()?, prec));
exprs.push_back((bin_expr.rhs()?, cbin_expr.rhs()?, prec));
} else {
let mut inv = invert_boolean_expression(expr);
if inv.needs_parens_in(dm.syntax().parent()?) {
inv = ast::make::expr_paren(inv).clone_for_update();
let mut inv = invert_boolean_expression(&make, expr);
if precedence(&inv).needs_parentheses_in(prec) {
inv = make.expr_paren(inv).into();
}
ted::replace(dm.syntax(), inv.syntax());
editor.replace(demorganed.syntax(), inv.syntax());
}
} else {
return None;
}
} else {
let mut inv = invert_boolean_expression(dm.clone_subtree()).clone_for_update();
if inv.needs_parens_in(dm.syntax().parent()?) {
inv = ast::make::expr_paren(inv).clone_for_update();
let mut inv = invert_boolean_expression(&make, demorganed.clone());
if precedence(&inv).needs_parentheses_in(prec) {
inv = make.expr_paren(inv).into();
}
ted::replace(dm.syntax(), inv.syntax());
editor.replace(demorganed.syntax(), inv.syntax());
}
}

editor.add_mappings(make.finish_with_mappings());
let edit = editor.finish();
let demorganed = ast::Expr::cast(edit.new_root().clone())?;

acc.add_group(
&GroupLabel("Apply De Morgan's law".to_owned()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
|edit| {
let demorganed = ast::Expr::BinExpr(demorganed);
|builder| {
let make = SyntaxFactory::new();
let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast);
let neg_expr = paren_expr
.clone()

@@ -107,24 +121,32 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
.filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
.map(ast::Expr::PrefixExpr);

let mut editor;
if let Some(paren_expr) = paren_expr {
if let Some(neg_expr) = neg_expr {
cov_mark::hit!(demorgan_double_negation);
let parent = neg_expr.syntax().parent();
editor = builder.make_editor(neg_expr.syntax());

if parent.is_some_and(|parent| demorganed.needs_parens_in(parent)) {
if parent.is_some_and(|parent| demorganed.needs_parens_in(&parent)) {
cov_mark::hit!(demorgan_keep_parens_for_op_precedence2);
edit.replace_ast(neg_expr, make::expr_paren(demorganed));
editor.replace(neg_expr.syntax(), make.expr_paren(demorganed).syntax());
} else {
edit.replace_ast(neg_expr, demorganed);
editor.replace(neg_expr.syntax(), demorganed.syntax());
};
} else {
cov_mark::hit!(demorgan_double_parens);
edit.replace_ast(paren_expr.into(), add_bang_paren(demorganed));
editor = builder.make_editor(paren_expr.syntax());

editor.replace(paren_expr.syntax(), add_bang_paren(&make, demorganed).syntax());
}
} else {
edit.replace_ast(bin_expr.into(), add_bang_paren(demorganed));
editor = builder.make_editor(bin_expr.syntax());
editor.replace(bin_expr.syntax(), add_bang_paren(&make, demorganed).syntax());
}

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

@@ -161,7 +183,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
let (name, arg_expr) = validate_method_call_expr(ctx, &method_call)?;

let ast::Expr::ClosureExpr(closure_expr) = arg_expr else { return None };
let closure_body = closure_expr.body()?;
let closure_body = closure_expr.body()?.clone_for_update();

let op_range = method_call.syntax().text_range();
let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());

@@ -170,18 +192,19 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
label,
op_range,
|edit| {
|builder| {
let make = SyntaxFactory::new();
let mut editor = builder.make_editor(method_call.syntax());
// replace the method name
let new_name = match name.text().as_str() {
"all" => make::name_ref("any"),
"any" => make::name_ref("all"),
"all" => make.name_ref("any"),
"any" => make.name_ref("all"),
_ => unreachable!(),
}
.clone_for_update();
edit.replace_ast(name, new_name);
};
editor.replace(name.syntax(), new_name.syntax());

// negate all tail expressions in the closure body
let tail_cb = &mut |e: &_| tail_cb_impl(edit, e);
let tail_cb = &mut |e: &_| tail_cb_impl(&mut editor, &make, e);
walk_expr(&closure_body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {

@@ -198,15 +221,15 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
.and_then(ast::PrefixExpr::cast)
.filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
{
edit.delete(
prefix_expr
.op_token()
.expect("prefix expression always has an operator")
.text_range(),
editor.delete(
prefix_expr.op_token().expect("prefix expression always has an operator"),
);
} else {
edit.insert(method_call.syntax().text_range().start(), "!");
editor.insert(Position::before(method_call.syntax()), make.token(SyntaxKind::BANG));
}

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

@@ -233,26 +256,26 @@ fn validate_method_call_expr(
it_type.impls_trait(sema.db, iter_trait, &[]).then_some((name_ref, arg_expr))
}

fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
fn tail_cb_impl(editor: &mut SyntaxEditor, make: &SyntaxFactory, e: &ast::Expr) {
match e {
ast::Expr::BreakExpr(break_expr) => {
if let Some(break_expr_arg) = break_expr.expr() {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(edit, e))
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(editor, make, e))
}
}
ast::Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => {
let inverted_body = invert_boolean_expression(e.clone());
edit.replace(e.syntax().text_range(), inverted_body.syntax().text());
let inverted_body = invert_boolean_expression(make, e.clone());
editor.replace(e.syntax(), inverted_body.syntax());
}
}
}

/// Add bang and parentheses to the expression.
fn add_bang_paren(expr: ast::Expr) -> ast::Expr {
make::expr_prefix(T![!], make::expr_paren(expr)).into()
fn add_bang_paren(make: &SyntaxFactory, expr: ast::Expr) -> ast::Expr {
make.expr_prefix(T![!], make.expr_paren(expr).into()).into()
}

#[cfg(test)]
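For context on the assist itself, a minimal sketch of the rewrite De Morgan's law performs, written out by hand (the variables are invented for the example):

```rust
// `!(a && b)` becomes `!a || !b` and `!(a || b)` becomes `!a && !b`; the assist
// flips the operator, inverts each operand, and parenthesizes only where operator
// precedence would otherwise change the meaning.
fn main() {
    let (a, b) = (true, false);
    assert_eq!(!(a && b), !a || !b);
    assert_eq!(!(a || b), !a && !b);
}
```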
@@ -8,8 +8,9 @@ use ide_db::{
};
use itertools::Itertools;
use syntax::{
ast::{self, edit::AstNodeEdit, make, HasArgList},
ted, AstNode, SyntaxNode,
ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory, HasArgList},
syntax_editor::SyntaxEditor,
AstNode, SyntaxNode,
};

use crate::{

@@ -76,9 +77,9 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
"Convert `if` expression to `bool::then` call",
target,
|builder| {
let closure_body = closure_body.clone_for_update();
let closure_body = closure_body.clone_subtree();
let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
// Rewrite all `Some(e)` in tail position to `e`
let mut replacements = Vec::new();
for_each_tail_expr(&closure_body, &mut |e| {
let e = match e {
ast::Expr::BreakExpr(e) => e.expr(),

@@ -88,12 +89,16 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
if let Some(ast::Expr::CallExpr(call)) = e {
if let Some(arg_list) = call.arg_list() {
if let Some(arg) = arg_list.args().next() {
replacements.push((call.syntax().clone(), arg.syntax().clone()));
editor.replace(call.syntax(), arg.syntax());
}
}
}
});
replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
let edit = editor.finish();
let closure_body = ast::Expr::cast(edit.new_root().clone()).unwrap();

let mut editor = builder.make_editor(expr.syntax());
let make = SyntaxFactory::new();
let closure_body = match closure_body {
ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
e => e,

@@ -119,11 +124,18 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext<'_>
| ast::Expr::WhileExpr(_)
| ast::Expr::YieldExpr(_)
);
let cond = if invert_cond { invert_boolean_expression(cond) } else { cond };
let cond = if parenthesize { make::expr_paren(cond) } else { cond };
let arg_list = make::arg_list(Some(make::expr_closure(None, closure_body)));
let mcall = make::expr_method_call(cond, make::name_ref("then"), arg_list);
builder.replace(target, mcall.to_string());
let cond = if invert_cond {
invert_boolean_expression(&make, cond)
} else {
cond.clone_for_update()
};
let cond = if parenthesize { make.expr_paren(cond).into() } else { cond };
let arg_list = make.arg_list(Some(make.expr_closure(None, closure_body).into()));
let mcall = make.expr_method_call(cond, make.name_ref("then"), arg_list);
editor.replace(expr.syntax(), mcall.syntax());

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

@@ -173,16 +185,17 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
"Convert `bool::then` call to `if`",
target,
|builder| {
let closure_body = match closure_body {
let mapless_make = SyntaxFactory::without_mappings();
let closure_body = match closure_body.reset_indent() {
ast::Expr::BlockExpr(block) => block,
e => make::block_expr(None, Some(e)),
e => mapless_make.block_expr(None, Some(e)),
};

let closure_body = closure_body.clone_for_update();
let closure_body = closure_body.clone_subtree();
let mut editor = SyntaxEditor::new(closure_body.syntax().clone());
// Wrap all tails in `Some(...)`
let none_path = make::expr_path(make::ext::ident_path("None"));
let some_path = make::expr_path(make::ext::ident_path("Some"));
let mut replacements = Vec::new();
let none_path = mapless_make.expr_path(mapless_make.ident_path("None"));
let some_path = mapless_make.expr_path(mapless_make.ident_path("Some"));
for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
let e = match e {
ast::Expr::BreakExpr(e) => e.expr(),

@@ -190,28 +203,37 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
_ => Some(e.clone()),
};
if let Some(expr) = e {
replacements.push((
editor.replace(
expr.syntax().clone(),
make::expr_call(some_path.clone(), make::arg_list(Some(expr)))
mapless_make
.expr_call(some_path.clone(), mapless_make.arg_list(Some(expr)))
.syntax()
.clone_for_update(),
));
.clone(),
);
}
});
replacements.into_iter().for_each(|(old, new)| ted::replace(old, new));
let edit = editor.finish();
let closure_body = ast::BlockExpr::cast(edit.new_root().clone()).unwrap();

let mut editor = builder.make_editor(mcall.syntax());
let make = SyntaxFactory::new();

let cond = match &receiver {
ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
_ => receiver,
};
let if_expr = make::expr_if(
cond,
closure_body.reset_indent(),
Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))),
)
.indent(mcall.indent_level());
let if_expr = make
.expr_if(
cond,
closure_body,
Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))),
)
.indent(mcall.indent_level())
.clone_for_update();
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone());

builder.replace(target, if_expr.to_string());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
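A hedged, hand-written illustration of the two directions these assists cover (the expression is made up; the real assists rewrite whatever `if`/`bool::then` expression is under the cursor):

```rust
// `convert_if_to_bool_then` turns the `if`/`else` form into `bool::then`,
// and `convert_bool_then_to_if` goes the other way.
fn main() {
    let x = 3;
    let as_if = if x > 2 { Some(x * 10) } else { None };
    let as_then = (x > 2).then(|| x * 10);
    assert_eq!(as_if, as_then);
}
```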
@@ -136,7 +136,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem

/// Not all comments are valid candidates for conversion into doc comments. For example, the
/// comments in the code:
/// ```rust
/// ```ignore
/// // Brilliant module right here
///
/// // Really good right

@@ -148,7 +148,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem
/// mod nice_module {}
/// ```
/// can be converted to doc comments. However, the comments in this example:
/// ```rust
/// ```ignore
/// fn foo_bar(foo: Foo /* not bar yet */) -> Bar {
/// foo.into_bar()
/// // Nicely done

@@ -162,7 +162,7 @@ fn comment_to_doc(acc: &mut Assists, comment: ast::Comment, style: CommentPlacem
/// are not allowed to become doc comments. Moreover, some comments _are_ allowed, but aren't common
/// style in Rust. For example, the following comments are allowed to be doc comments, but it is not
/// common style for them to be:
/// ```rust
/// ```ignore
/// fn foo_bar(foo: Foo) -> Bar {
/// // this could be an inner comment with //!
/// foo.into_bar()
@@ -17,7 +17,7 @@ use syntax::{

use crate::{
assist_context::{AssistContext, Assists},
utils::invert_boolean_expression,
utils::invert_boolean_expression_legacy,
AssistId, AssistKind,
};

@@ -139,7 +139,7 @@ fn if_expr_to_guarded_return(
let new_expr = {
let then_branch =
make::block_expr(once(make::expr_stmt(early_expression).into()), None);
let cond = invert_boolean_expression(cond_expr);
let cond = invert_boolean_expression_legacy(cond_expr);
make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone_for_update()
@@ -13,7 +13,7 @@ use syntax::{

use crate::{
assist_context::{AssistContext, Assists},
utils::invert_boolean_expression,
utils::invert_boolean_expression_legacy,
AssistId, AssistKind,
};

@@ -63,7 +63,7 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
let stmts = iter::once(make::expr_stmt(if_expr.into()).into());
make::block_expr(stmts, None)
} else {
let if_cond = invert_boolean_expression(while_cond);
let if_cond = invert_boolean_expression_legacy(while_cond);
let if_expr = make::expr_if(if_cond, break_block, None).syntax().clone().into();
let elements = while_body.stmt_list().map_or_else(
|| Either::Left(iter::empty()),
@@ -272,7 +272,7 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
/// * We want whole node, like `loop {}`, `2 + 2`, `{ let n = 1; }` exprs.
/// Then we can use `ast::Expr`
/// * We want a few statements for a block. E.g.
/// ```rust,no_run
/// ```ignore
/// fn foo() -> i32 {
/// let m = 1;
/// $0

@@ -386,7 +386,7 @@ struct ContainerInfo {
/// Control flow that is exported from extracted function
///
/// E.g.:
/// ```rust,no_run
/// ```ignore
/// loop {
/// $0
/// if 42 == 42 {
@@ -1,8 +1,8 @@
use syntax::{
algo::non_trivia_sibling,
ast::{self, syntax_factory::SyntaxFactory},
syntax_editor::{Element, SyntaxMapping},
AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxToken, T,
syntax_editor::SyntaxMapping,
AstNode, Direction, NodeOrToken, SyntaxKind, SyntaxToken, T,
};

use crate::{AssistContext, AssistId, AssistKind, Assists};

@@ -39,37 +39,24 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
return None;
}

let prev = match prev {
SyntaxElement::Node(node) => node.syntax_element(),
_ => prev,
};
let next = match next {
SyntaxElement::Node(node) => node.syntax_element(),
_ => next,
};
let target = comma.text_range();
acc.add(AssistId("flip_comma", AssistKind::RefactorRewrite), "Flip comma", target, |builder| {
let parent = comma.parent().unwrap();
let mut editor = builder.make_editor(&parent);

acc.add(
AssistId("flip_comma", AssistKind::RefactorRewrite),
"Flip comma",
comma.text_range(),
|builder| {
let parent = comma.parent().unwrap();
let mut editor = builder.make_editor(&parent);
if let Some(parent) = ast::TokenTree::cast(parent) {
// An attribute. It often contains a path followed by a
// token tree (e.g. `align(2)`), so we have to be smarter.
let (new_tree, mapping) = flip_tree(parent.clone(), comma);
editor.replace(parent.syntax(), new_tree.syntax());
editor.add_mappings(mapping);
} else {
editor.replace(prev.clone(), next.clone());
editor.replace(next.clone(), prev.clone());
}

if let Some(parent) = ast::TokenTree::cast(parent) {
// An attribute. It often contains a path followed by a
// token tree (e.g. `align(2)`), so we have to be smarter.
let (new_tree, mapping) = flip_tree(parent.clone(), comma);
editor.replace(parent.syntax(), new_tree.syntax());
editor.add_mappings(mapping);
} else {
editor.replace(prev.clone(), next.clone());
editor.replace(next.clone(), prev.clone());
}

builder.add_file_edits(ctx.file_id(), editor);
},
)
builder.add_file_edits(ctx.file_id(), editor);
})
}

fn flip_tree(tree: ast::TokenTree, comma: SyntaxToken) -> (ast::TokenTree, SyntaxMapping) {
80
crates/ide-assists/src/handlers/flip_or_pattern.rs
Normal file

@@ -0,0 +1,80 @@
use syntax::{
algo::non_trivia_sibling,
ast::{self, AstNode},
Direction, T,
};

use crate::{AssistContext, AssistId, AssistKind, Assists};

// Assist: flip_or_pattern
//
// Flips two patterns in an or-pattern.
//
// ```
// fn foo() {
// let (a |$0 b) = 1;
// }
// ```
// ->
// ```
// fn foo() {
// let (b | a) = 1;
// }
// ```
pub(crate) fn flip_or_pattern(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
// Only flip on the `|` token
let pipe = ctx.find_token_syntax_at_offset(T![|])?;

let parent = ast::OrPat::cast(pipe.parent()?)?;

let before = non_trivia_sibling(pipe.clone().into(), Direction::Prev)?.into_node()?;
let after = non_trivia_sibling(pipe.clone().into(), Direction::Next)?.into_node()?;

let target = pipe.text_range();
acc.add(
AssistId("flip_or_pattern", AssistKind::RefactorRewrite),
"Flip patterns",
target,
|builder| {
let mut editor = builder.make_editor(parent.syntax());
editor.replace(before.clone(), after.clone());
editor.replace(after, before);
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

#[cfg(test)]
mod tests {
use super::*;

use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};

#[test]
fn flip_or_pattern_assist_available() {
check_assist_target(flip_or_pattern, "fn main(a |$0 b: ()) {}", "|")
}

#[test]
fn flip_or_pattern_not_applicable_for_leading_pipe() {
check_assist_not_applicable(flip_or_pattern, "fn main(|$0 b: ()) {}")
}

#[test]
fn flip_or_pattern_works() {
check_assist(
flip_or_pattern,
"fn foo() { let (a | b |$0 c | d) = 1; }",
"fn foo() { let (a | c | b | d) = 1; }",
)
}

#[test]
fn flip_or_pattern_works_match_guard() {
check_assist(
flip_or_pattern,
"fn foo() { match() { a |$0 b if true => () }}",
"fn foo() { match() { b | a if true => () }}",
)
}
}
@@ -18,17 +18,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// fn foo<T: Copy + Clone>() { }
// ```
pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
// We want to replicate the behavior of `flip_binexpr` by only suggesting
// the assist when the cursor is on a `+`
// Only flip on the `+` token
let plus = ctx.find_token_syntax_at_offset(T![+])?;

// Make sure we're in a `TypeBoundList`
let parent = ast::TypeBoundList::cast(plus.parent()?)?;

let (before, after) = (
non_trivia_sibling(plus.clone().into(), Direction::Prev)?.into_node()?,
non_trivia_sibling(plus.clone().into(), Direction::Next)?.into_node()?,
);
let before = non_trivia_sibling(plus.clone().into(), Direction::Prev)?.into_node()?;
let after = non_trivia_sibling(plus.clone().into(), Direction::Next)?.into_node()?;

let target = plus.text_range();
acc.add(
@@ -1037,7 +1037,7 @@ fn filter_bounds_in_scope(

/// Makes duplicate argument names unique by appending incrementing numbers.
///
/// ```
/// ```ignore
/// let mut names: Vec<String> =
/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
/// deduplicate_arg_names(&mut names);
@@ -78,7 +78,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
// data: T,
// }
//
// impl<T: Clone> ${0:_} for Ctx<T> {}
// impl<T: Clone> ${1:_} for Ctx<T> {$0}
// ```
pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let nominal = ctx.find_node_at_offset::<ast::Adt>()?;

@@ -102,6 +102,10 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
if let Some(trait_) = impl_.trait_() {
edit.add_placeholder_snippet(cap, trait_);
}

if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
edit.add_tabstop_after_token(cap, l_curly);
}
}

insert_impl(impl_, &edit.make_mut(nominal));

@@ -278,7 +282,7 @@ mod tests {
r#"
struct Foo {}

impl ${0:_} for Foo {}
impl ${1:_} for Foo {$0}
"#,
);
}

@@ -293,7 +297,7 @@ mod tests {
r#"
struct Foo<T: Clone> {}

impl<T: Clone> ${0:_} for Foo<T> {}
impl<T: Clone> ${1:_} for Foo<T> {$0}
"#,
);
}

@@ -308,7 +312,7 @@ mod tests {
r#"
struct Foo<'a, T: Foo<'a>> {}

impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {}
impl<'a, T: Foo<'a>> ${1:_} for Foo<'a, T> {$0}
"#,
);
}

@@ -326,7 +330,7 @@ mod tests {
struct Foo<'a, T: Foo<'a>> {}

#[cfg(feature = "foo")]
impl<'a, T: Foo<'a>> ${0:_} for Foo<'a, T> {}
impl<'a, T: Foo<'a>> ${1:_} for Foo<'a, T> {$0}
"#,
);
}

@@ -341,7 +345,7 @@ mod tests {
r#"
struct Defaulted<T = i32> {}

impl<T> ${0:_} for Defaulted<T> {}
impl<T> ${1:_} for Defaulted<T> {$0}
"#,
);
}

@@ -356,7 +360,7 @@ mod tests {
r#"
struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}

impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> ${0:_} for Defaulted<'a, 'b, T, S> {}
impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> ${1:_} for Defaulted<'a, 'b, T, S> {$0}
"#,
);
}

@@ -371,7 +375,7 @@ mod tests {
r#"
struct Defaulted<const N: i32 = 0> {}

impl<const N: i32> ${0:_} for Defaulted<N> {}
impl<const N: i32> ${1:_} for Defaulted<N> {$0}
"#,
);
}

@@ -398,10 +402,10 @@ mod tests {
inner: T,
}

impl<T> ${0:_} for Struct<T>
impl<T> ${1:_} for Struct<T>
where
T: Trait,
{
{$0
}
"#,
);

@@ -476,7 +480,7 @@ mod tests {
mod foo {
struct Bar {}

impl ${0:_} for Bar {}
impl ${1:_} for Bar {$0}
}
"#,
);
@@ -39,25 +39,10 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
// FIXME: Add support to handle type aliases for builtin scalar types.
validate_type_recursively(ctx, Some(&konst_ty), false, fuel)?;

let expr = konst.value(ctx.sema.db)?;

let value = match expr {
ast::Expr::BlockExpr(_)
| ast::Expr::Literal(_)
| ast::Expr::RefExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::IfExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::MatchExpr(_)
| ast::Expr::MacroExpr(_)
| ast::Expr::BinExpr(_)
| ast::Expr::CallExpr(_) => konst
.eval(ctx.sema.db)
.ok()?
.render(ctx.sema.db, konst.krate(ctx.sema.db).edition(ctx.sema.db)),
_ => return None,
};
let value = konst
.eval(ctx.sema.db)
.ok()?
.render(ctx.sema.db, konst.krate(ctx.sema.db).edition(ctx.sema.db));

let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline);
@@ -73,39 +73,12 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
}
let usage_node =
name_ref.syntax().ancestors().find(|it| ast::PathExpr::can_cast(it.kind()));
let usage_parent_option =
usage_node.and_then(|it| it.parent()).and_then(ast::Expr::cast);
let usage_parent_option = usage_node.and_then(|it| it.parent());
let usage_parent = match usage_parent_option {
Some(u) => u,
None => return Some((range, name_ref, false)),
};
let initializer = matches!(
initializer_expr,
ast::Expr::CallExpr(_)
| ast::Expr::IndexExpr(_)
| ast::Expr::MethodCallExpr(_)
| ast::Expr::FieldExpr(_)
| ast::Expr::TryExpr(_)
| ast::Expr::Literal(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::PathExpr(_)
| ast::Expr::BlockExpr(_),
);
let parent = matches!(
usage_parent,
ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::ForExpr(_)
| ast::Expr::WhileExpr(_)
| ast::Expr::BreakExpr(_)
| ast::Expr::ReturnExpr(_)
| ast::Expr::MatchExpr(_)
| ast::Expr::BlockExpr(_)
);
Some((range, name_ref, !(initializer || parent)))
Some((range, name_ref, initializer_expr.needs_parens_in(&usage_parent)))
})
.collect::<Option<Vec<_>>>()?;

@@ -281,11 +254,11 @@ fn foo() {
r"
fn bar(a: usize) {}
fn foo() {
(1 + 1) + 1;
if (1 + 1) > 10 {
1 + 1 + 1;
if 1 + 1 > 10 {
}

while (1 + 1) > 10 {
while 1 + 1 > 10 {

}
let b = (1 + 1) * 10;

@@ -350,14 +323,14 @@ fn foo() {
r"
fn bar(a: usize) -> usize { a }
fn foo() {
(bar(1) as u64) + 1;
if (bar(1) as u64) > 10 {
bar(1) as u64 + 1;
if bar(1) as u64 > 10 {
}

while (bar(1) as u64) > 10 {
while bar(1) as u64 > 10 {

}
let b = (bar(1) as u64) * 10;
let b = bar(1) as u64 * 10;
bar(bar(1) as u64);
}",
);

@@ -574,7 +547,7 @@ fn foo() {
r"
fn foo() {
let bar = 10;
let b = (&bar) * 10;
let b = &bar * 10;
}",
);
}
@@ -276,7 +276,7 @@ impl ConstAndTypeMap {
/// 1. Map the provided instance's generic args to the type alias's generic
/// params:
///
/// ```
/// ```ignore
/// type A<'a, const N: usize, T = u64> = &'a [T; N];
/// ^ alias generic params
/// let a: A<100>;
@@ -6,7 +6,7 @@ use syntax::{

use crate::{
assist_context::{AssistContext, Assists},
utils::invert_boolean_expression,
utils::invert_boolean_expression_legacy,
AssistId, AssistKind,
};

@@ -48,7 +48,7 @@ pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
};

acc.add(AssistId("invert_if", AssistKind::RefactorRewrite), "Invert if", if_range, |edit| {
let flip_cond = invert_boolean_expression(cond.clone());
let flip_cond = invert_boolean_expression_legacy(cond.clone());
edit.replace_ast(cond, flip_cond);

let else_node = else_block.syntax();
@@ -1,4 +1,4 @@
use syntax::{SyntaxKind, TextRange, T};
use syntax::{SyntaxKind, T};

use crate::{AssistContext, AssistId, AssistKind, Assists};

@@ -19,11 +19,6 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
let delete_from = mut_token.text_range().start();
let delete_to = match mut_token.next_token() {
Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(),
_ => mut_token.text_range().end(),
};

let target = mut_token.text_range();
acc.add(

@@ -31,7 +26,13 @@ pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
"Remove `mut` keyword",
target,
|builder| {
builder.delete(TextRange::new(delete_from, delete_to));
let mut editor = builder.make_editor(&mut_token.parent().unwrap());
match mut_token.next_token() {
Some(it) if it.kind() == SyntaxKind::WHITESPACE => editor.delete(it),
_ => (),
}
editor.delete(mut_token);
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
@@ -1,4 +1,8 @@
use syntax::{ast, AstNode, SyntaxKind, T};
use syntax::{
ast::{self, syntax_factory::SyntaxFactory},
syntax_editor::Position,
AstNode, SyntaxKind, T,
};

use crate::{AssistContext, AssistId, AssistKind, Assists};

@@ -30,7 +34,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let expr = parens.expr()?;

let parent = parens.syntax().parent()?;
if expr.needs_parens_in(parent) {
if expr.needs_parens_in(&parent) {
return None;
}

@@ -40,6 +44,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
"Remove redundant parentheses",
target,
|builder| {
let mut editor = builder.make_editor(parens.syntax());
let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
let need_to_add_ws = match prev_token {
Some(it) => {

@@ -48,9 +53,13 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
None => false,
};
let expr = if need_to_add_ws { format!(" {expr}") } else { expr.to_string() };

builder.replace(parens.syntax().text_range(), expr)
if need_to_add_ws {
let make = SyntaxFactory::new();
editor.insert(Position::before(parens.syntax()), make.whitespace(" "));
editor.add_mappings(make.finish_with_mappings());
}
editor.replace(parens.syntax(), expr.syntax());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
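A small, hand-written sketch of the precedence reasoning behind `needs_parens_in` (the expressions are invented for the example): parentheses are redundant only when the inner expression already binds at least as tightly as its context requires.

```rust
fn main() {
    // `2 * 3` binds tighter than `+`, so parentheses around it would be redundant
    // and the assist could remove them.
    let a = 1 + 2 * 3;
    // Here the parentheses change the result, so they are not redundant and the
    // assist would not offer to remove them.
    let b = (1 + 2) * 3;
    assert_eq!((a, b), (7, 9));
}
```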
@@ -1,8 +1,9 @@
use ide_db::{defs::Definition, search::FileReference, EditionedFileId};
use syntax::{
algo::find_node_at_range,
algo::{find_node_at_range, least_common_ancestor_element},
ast::{self, HasArgList},
AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, T,
syntax_editor::Element,
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, T,
};

use SyntaxKind::WHITESPACE;

@@ -74,15 +75,21 @@ pub(crate) fn remove_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) ->
cov_mark::hit!(keep_used);
return None;
}
let parent = param.syntax().parent()?;
acc.add(
AssistId("remove_unused_param", AssistKind::Refactor),
"Remove unused parameter",
param.syntax().text_range(),
|builder| {
builder.delete(range_to_remove(param.syntax()));
let mut editor = builder.make_editor(&parent);
let elements = elements_to_remove(param.syntax());
for element in elements {
editor.delete(element);
}
for (file_id, references) in fn_def.usages(&ctx.sema).all() {
process_usages(ctx, builder, file_id, references, param_position, is_self_present);
}
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

@@ -96,20 +103,24 @@ fn process_usages(
is_self_present: bool,
) {
let source_file = ctx.sema.parse(file_id);
builder.edit_file(file_id);
let possible_ranges = references
.into_iter()
.filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));

let mut ranges_to_delete: Vec<TextRange> = vec![];
for range in possible_ranges {
if !ranges_to_delete.iter().any(|it| it.contains_range(range)) {
ranges_to_delete.push(range)
for element_range in possible_ranges {
let Some(SyntaxElement::Node(parent)) = element_range
.iter()
.cloned()
.reduce(|a, b| least_common_ancestor_element(&a, &b).unwrap().syntax_element())
else {
continue;
};
let mut editor = builder.make_editor(&parent);
for element in element_range {
editor.delete(element);
}
}

for range in ranges_to_delete {
builder.delete(range)
builder.add_file_edits(file_id, editor);
}
}

@@ -118,7 +129,7 @@ fn process_usage(
FileReference { range, .. }: FileReference,
mut arg_to_remove: usize,
is_self_present: bool,
) -> Option<TextRange> {
) -> Option<Vec<SyntaxElement>> {
let call_expr_opt: Option<ast::CallExpr> = find_node_at_range(source_file.syntax(), range);
if let Some(call_expr) = call_expr_opt {
let call_expr_range = call_expr.expr()?.syntax().text_range();

@@ -127,7 +138,7 @@ fn process_usage(
}

let arg = call_expr.arg_list()?.args().nth(arg_to_remove)?;
return Some(range_to_remove(arg.syntax()));
return Some(elements_to_remove(arg.syntax()));
}

let method_call_expr_opt: Option<ast::MethodCallExpr> =

@@ -143,7 +154,7 @@ fn process_usage(
}

let arg = method_call_expr.arg_list()?.args().nth(arg_to_remove)?;
return Some(range_to_remove(arg.syntax()));
return Some(elements_to_remove(arg.syntax()));
}

None

@@ -174,6 +185,29 @@ pub(crate) fn range_to_remove(node: &SyntaxNode) -> TextRange {
}
}

pub(crate) fn elements_to_remove(node: &SyntaxNode) -> Vec<SyntaxElement> {
let up_to_comma = next_prev().find_map(|dir| {
node.siblings_with_tokens(dir)
.filter_map(|it| it.into_token())
.find(|it| it.kind() == T![,])
.map(|it| (dir, it))
});
if let Some((dir, token)) = up_to_comma {
let after = token.siblings_with_tokens(dir).nth(1).unwrap();
let mut result: Vec<_> =
node.siblings_with_tokens(dir).take_while(|it| it != &after).collect();
if node.next_sibling().is_some() {
result.extend(
token.siblings_with_tokens(dir).skip(1).take_while(|it| it.kind() == WHITESPACE),
);
}

result
} else {
vec![node.syntax_element()]
}
}

#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -1,6 +1,6 @@
use ide_db::imports::insert_use::ImportScope;
use syntax::{
ast::{self, make, AstNode, HasArgList},
ast::{self, prec::ExprPrecedence, AstNode, HasArgList},
TextRange,
};

@@ -55,7 +55,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
TextRange::new(path.syntax().text_range().start(), l_paren.text_range().end());

// Parens around `expr` if needed
let parens = needs_parens_as_receiver(&first_arg).then(|| {
let parens = first_arg.precedence().needs_parentheses_in(ExprPrecedence::Postfix).then(|| {
let range = first_arg.syntax().text_range();
(range.start(), range.end())
});

@@ -124,24 +124,6 @@ fn add_import(
}
}

fn needs_parens_as_receiver(expr: &ast::Expr) -> bool {
// Make `(expr).dummy()`
let dummy_call = make::expr_method_call(
make::expr_paren(expr.clone()),
make::name_ref("dummy"),
make::arg_list([]),
);

// Get the `expr` clone with the right parent back
// (unreachable!s are fine since we've just constructed the expression)
let ast::Expr::MethodCallExpr(call) = &dummy_call else { unreachable!() };
let Some(receiver) = call.receiver() else { unreachable!() };
let ast::Expr::ParenExpr(parens) = receiver else { unreachable!() };
let Some(expr) = parens.expr() else { unreachable!() };

expr.needs_parens_in(dummy_call.syntax().clone())
}

#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -149,6 +149,7 @@ mod handlers {
mod fix_visibility;
mod flip_binexpr;
mod flip_comma;
mod flip_or_pattern;
mod flip_trait_bound;
mod generate_constant;
mod generate_default_from_enum_variant;

@@ -279,6 +280,7 @@ mod handlers {
fix_visibility::fix_visibility,
flip_binexpr::flip_binexpr,
flip_comma::flip_comma,
flip_or_pattern::flip_or_pattern,
flip_trait_bound::flip_trait_bound,
generate_constant::generate_constant,
generate_default_from_enum_variant::generate_default_from_enum_variant,
@@ -1195,6 +1195,23 @@ fn main() {
)
}

#[test]
fn doctest_flip_or_pattern() {
check_doc_test(
"flip_or_pattern",
r#####"
fn foo() {
let (a |$0 b) = 1;
}
"#####,
r#####"
fn foo() {
let (b | a) = 1;
}
"#####,
)
}

#[test]
fn doctest_flip_trait_bound() {
check_doc_test(

@@ -1961,7 +1978,7 @@ struct Ctx<T: Clone> {
data: T,
}

impl<T: Clone> ${0:_} for Ctx<T> {}
impl<T: Clone> ${1:_} for Ctx<T> {$0}
"#####,
)
}
@@ -17,7 +17,9 @@ use syntax::{
self,
edit::{AstNodeEdit, IndentLevel},
edit_in_place::{AttrsOwnerEdit, Indent, Removable},
make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
make,
syntax_factory::SyntaxFactory,
HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
},
ted, AstNode, AstToken, Direction, Edition, NodeOrToken, SourceFile,
SyntaxKind::*,

@@ -245,11 +247,79 @@ pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
.unwrap_or_else(|| node.text_range().start())
}

pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr).into())
pub(crate) fn invert_boolean_expression(make: &SyntaxFactory, expr: ast::Expr) -> ast::Expr {
invert_special_case(make, &expr).unwrap_or_else(|| make.expr_prefix(T![!], expr).into())
}

fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
// FIXME: Migrate usages of this function to the above function and remove this.
pub(crate) fn invert_boolean_expression_legacy(expr: ast::Expr) -> ast::Expr {
invert_special_case_legacy(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr).into())
}

fn invert_special_case(make: &SyntaxFactory, expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => {
let op_kind = bin.op_kind()?;
let rev_kind = match op_kind {
ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated }) => {
ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: !negated })
}
ast::BinaryOp::CmpOp(ast::CmpOp::Ord { ordering: ast::Ordering::Less, strict }) => {
ast::BinaryOp::CmpOp(ast::CmpOp::Ord {
ordering: ast::Ordering::Greater,
strict: !strict,
})
}
ast::BinaryOp::CmpOp(ast::CmpOp::Ord {
ordering: ast::Ordering::Greater,
strict,
}) => ast::BinaryOp::CmpOp(ast::CmpOp::Ord {
ordering: ast::Ordering::Less,
strict: !strict,
}),
// Parenthesize other expressions before prefixing `!`
_ => {
return Some(
make.expr_prefix(T![!], make.expr_paren(expr.clone()).into()).into(),
);
}
};

Some(make.expr_bin(bin.lhs()?, rev_kind, bin.rhs()?).into())
}
ast::Expr::MethodCallExpr(mce) => {
let receiver = mce.receiver()?;
let method = mce.name_ref()?;
let arg_list = mce.arg_list()?;

let method = match method.text().as_str() {
"is_some" => "is_none",
"is_none" => "is_some",
"is_ok" => "is_err",
"is_err" => "is_ok",
_ => return None,
};

Some(make.expr_method_call(receiver, make.name_ref(method), arg_list).into())
}
ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
ast::Expr::ParenExpr(parexpr) => {
parexpr.expr().map(|e| e.clone_subtree().clone_for_update())
}
_ => pe.expr().map(|e| e.clone_subtree().clone_for_update()),
},
ast::Expr::Literal(lit) => match lit.kind() {
ast::LiteralKind::Bool(b) => match b {
true => Some(ast::Expr::Literal(make.expr_literal("false"))),
false => Some(ast::Expr::Literal(make.expr_literal("true"))),
},
_ => None,
},
_ => None,
}
}

fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => {
let bin = bin.clone_for_update();
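As a quick sanity check on the special cases handled by `invert_special_case`, here is a hand-written set of the equivalences the inversion relies on, in plain Rust and independent of the syntax-tree machinery:

```rust
fn main() {
    let (a, b) = (1, 2);
    // Comparisons invert to their logical opposites...
    assert_eq!(!(a == b), a != b);
    assert_eq!(!(a < b), a >= b);
    assert_eq!(!(a > b), a <= b);
    // ...and the Option predicates swap with their counterparts.
    let v: Option<i32> = None;
    assert_eq!(!v.is_some(), v.is_none());
}
```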
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -147,7 +147,10 @@ pub(crate) fn complete_expr_path(
});
match resolution {
hir::PathResolution::Def(hir::ModuleDef::Module(module)) => {
let module_scope = module.scope(ctx.db, Some(ctx.module));
// Set visible_from to None so private items are returned.
// They will be possibly filtered out in add_path_resolution()
// via def_is_visible().
let module_scope = module.scope(ctx.db, None);
for (name, def) in module_scope {
if scope_def_applicable(def) {
acc.add_path_resolution(
@@ -249,8 +249,8 @@ pub(crate) enum Qualified {
    /// This would be None, if path is not solely made of
    /// `super` segments, e.g.
    ///
    /// ```rust
    /// use super::foo;
    /// ```ignore
    /// use super::foo;
    /// ```
    ///
    /// Otherwise it should be Some(count of `super`)
@@ -97,7 +97,8 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
/// We do this by recursively expanding all macros and picking the best possible match. We cannot just
/// choose the first expansion each time because macros can expand to something that does not include
/// our completion marker, e.g.:
/// ```
///
/// ```ignore
/// macro_rules! helper { ($v:ident) => {} }
/// macro_rules! my_macro {
///     ($v:ident) => {

@@ -106,7 +107,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
///     };
/// }
///
/// my_macro!(complete_me_here)
/// my_macro!(complete_me_here);
/// ```
/// If we would expand the first thing we encounter only (which in fact this method used to do), we would
/// be unable to complete here, because we would be walking directly into the void. So we instead try
@@ -149,9 +149,9 @@ pub struct CompletionRelevance
    /// This is set when the identifier being completed matches up with the name that is expected,
    /// like in a function argument.
    ///
    /// ```
    /// ```ignore
    /// fn f(spam: String) {}
    /// fn main {
    /// fn main() {
    ///     let spam = 92;
    ///     f($0) // name of local matches the name of param
    /// }

@@ -161,7 +161,7 @@ pub struct CompletionRelevance
    pub type_match: Option<CompletionRelevanceTypeMatch>,
    /// Set for local variables.
    ///
    /// ```
    /// ```ignore
    /// fn foo(a: u32) {
    ///     let b = 0;
    ///     $0 // `a` and `b` are local

@@ -195,7 +195,7 @@ pub struct CompletionRelevanceTraitInfo {
pub enum CompletionRelevanceTypeMatch {
    /// This is set in cases like these:
    ///
    /// ```
    /// ```ignore
    /// enum Option<T> { Some(T), None }
    /// fn f(a: Option<u32>) {}
    /// fn main {

@@ -205,9 +205,9 @@ pub enum CompletionRelevanceTypeMatch {
    CouldUnify,
    /// This is set in cases where the type matches the expected type, like:
    ///
    /// ```
    /// ```ignore
    /// fn f(spam: String) {}
    /// fn main {
    /// fn main() {
    ///     let foo = String::new();
    ///     f($0) // type of local matches the type of param
    /// }

@@ -221,7 +221,7 @@ pub enum CompletionRelevancePostfixMatch {
    NonExact,
    /// This is set in cases like these:
    ///
    /// ```
    /// ```ignore
    /// (a > b).not$0
    /// ```
    ///
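The relevance flags documented above only become useful once they are folded into an ordering. The snippet below is a toy illustration, not the actual `CompletionRelevance` scoring: the weights and the `score` function are invented, and only the general shape matches the docs (an exact name match and an exact type match dominate, a could-unify type match counts for less, and locals get a small boost).

// Invented weights; only the general shape mirrors the flags described above.
#[derive(Clone, Copy, PartialEq, Eq)]
enum TypeMatch {
    CouldUnify,
    Exact,
}

struct Relevance {
    exact_name_match: bool,
    type_match: Option<TypeMatch>,
    is_local: bool,
}

impl Relevance {
    fn score(&self) -> u32 {
        let mut score = 0;
        if self.exact_name_match {
            score += 10;
        }
        score += match self.type_match {
            Some(TypeMatch::Exact) => 8,
            Some(TypeMatch::CouldUnify) => 3,
            None => 0,
        };
        if self.is_local {
            score += 1;
        }
        score
    }
}

fn main() {
    // e.g. `f(spam$0)` where a local `spam: String` matches the `spam: String` parameter.
    let matching_local =
        Relevance { exact_name_match: true, type_match: Some(TypeMatch::Exact), is_local: true };
    // Some unrelated item that merely type-checks after unification.
    let unifies_only =
        Relevance { exact_name_match: false, type_match: Some(TypeMatch::CouldUnify), is_local: false };
    assert!(matching_local.score() > unifies_only.score());
}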
@@ -143,7 +143,7 @@ impl CompletionFieldsToResolve {
/// already present, it should give all possible variants for the identifier at
/// the caret. In other words, for
///
/// ```no_run
/// ```ignore
/// fn f() {
///     let foo = 92;
///     let _ = bar$0
@@ -628,11 +628,9 @@ fn compute_ref_match(
    let expected_type = ctx.expected_type.as_ref()?;
    let expected_without_ref = expected_type.remove_ref();
    let completion_without_ref = completion_ty.remove_ref();

    if completion_ty == expected_type {
    if expected_type.could_unify_with(ctx.db, completion_ty) {
        return None;
    }

    if let Some(expected_without_ref) = &expected_without_ref {
        if completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) {
            cov_mark::hit!(suggest_ref);
@@ -2007,6 +2005,30 @@ fn f() {
    );
}

#[test]
fn test_avoid_redundant_suggestion() {
    check_relevance(
        r#"
struct aa([u8]);

impl aa {
    fn from_bytes(bytes: &[u8]) -> &Self {
        unsafe { &*(bytes as *const [u8] as *const aa) }
    }
}

fn bb()-> &'static aa {
    let bytes = b"hello";
    aa::$0
}
"#,
        expect![[r#"
            ex bb() [type]
            fn from_bytes(…) fn(&[u8]) -> &aa [type_could_unify]
        "#]],
    );
}

#[test]
fn suggest_ref_mut() {
    cov_mark::check!(suggest_ref);
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -252,10 +252,10 @@ pub enum SymbolKind {
impl From<hir::MacroKind> for SymbolKind {
    fn from(it: hir::MacroKind) -> Self {
        match it {
            hir::MacroKind::Declarative | hir::MacroKind::BuiltIn => SymbolKind::Macro,
            hir::MacroKind::Declarative | hir::MacroKind::DeclarativeBuiltIn => SymbolKind::Macro,
            hir::MacroKind::ProcMacro => SymbolKind::ProcMacro,
            hir::MacroKind::Derive => SymbolKind::Derive,
            hir::MacroKind::Attr => SymbolKind::Attribute,
            hir::MacroKind::Derive | hir::MacroKind::DeriveBuiltIn => SymbolKind::Derive,
            hir::MacroKind::Attr | hir::MacroKind::AttrBuiltIn => SymbolKind::Attribute,
        }
    }
}
@@ -32,7 +32,7 @@ type DefaultedParam = Either<hir::TypeParam, hir::ConstParam>;
/// block), you generally want to appropriately qualify the names, and sometimes
/// you might want to substitute generic parameters as well:
///
/// ```
/// ```ignore
/// mod x {
///     pub struct A<V>;
///     pub trait T<U> { fn foo(&self, _: U) -> A<U>; }
@@ -373,7 +373,9 @@ impl Definition {
                    SearchScope::krate(db, module.krate())
                }
            }
            hir::MacroKind::BuiltIn => SearchScope::crate_graph(db),
            hir::MacroKind::AttrBuiltIn
            | hir::MacroKind::DeriveBuiltIn
            | hir::MacroKind::DeclarativeBuiltIn => SearchScope::crate_graph(db),
            hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => {
                SearchScope::reverse_dependencies(db, module.krate())
            }
@@ -493,7 +493,7 @@ pub enum Snippet {
    Placeholder(TextRange),
    /// A group of placeholder snippets, e.g.
    ///
    /// ```no_run
    /// ```ignore
    /// let ${0:new_var} = 4;
    /// fun(1, 2, 3, ${0:new_var});
    /// ```
@@ -79,7 +79,9 @@ const USELESS_METHODS: &[&str] = &[
/// the name, e.g. `a`, `a1`, `a2`, ...
///
/// # Examples
/// ```rust
///
/// ```
/// # use ide_db::syntax_helpers::suggest_name::NameGenerator;
/// let mut generator = NameGenerator::new();
/// assert_eq!(generator.suggest_name("a"), "a");
/// assert_eq!(generator.suggest_name("a"), "a1");
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -854,7 +854,7 @@ fn main() {
#[target_feature(enable = "avx")]
fn foo() {}

#[target_feature(enable = "avx,avx2")]
#[target_feature(enable = "avx2")]
fn bar() {
    foo();
}
@@ -697,7 +697,7 @@ struct SeverityAttr {
/// #[warn(non_snake_case)]
/// mod foo {
///     #[allow(nonstandard_style)]
///     mod bar;
///     mod bar {}
/// }
/// ```
/// We want to not warn on non snake case inside `bar`. If we are traversing this for the first
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -10,7 +10,6 @@ license.workspace = true
rust-version.workspace = true

[lib]
doctest = false

[dependencies]
cov-mark = "2.0.0-pre.1"
@@ -379,13 +379,15 @@ fn rewrite_intra_doc_link(
    let resolved = resolve_doc_path_for_def(db, def, link, ns)?;
    let mut url = get_doc_base_urls(db, resolved, None, None).0?;

    let (_, file, _) = filename_and_frag_for_def(db, resolved)?;
    let (_, file, frag) = filename_and_frag_for_def(db, resolved)?;
    if let Some(path) = mod_path_of_def(db, resolved) {
        url = url.join(&path).ok()?;
    }

    let frag = anchor.or(frag.as_deref());

    url = url.join(&file).ok()?;
    url.set_fragment(anchor);
    url.set_fragment(frag);

    Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
}

@@ -621,11 +623,9 @@ fn filename_and_frag_for_def(
            format!("fn.{}.html", f.name(db).as_str())
        }
        Definition::Variant(ev) => {
            format!(
                "enum.{}.html#variant.{}",
                ev.parent_enum(db).name(db).as_str(),
                ev.name(db).as_str()
            )
            let def = Definition::Adt(ev.parent_enum(db).into());
            let (_, file, _) = filename_and_frag_for_def(db, def)?;
            return Some((def, file, Some(format!("variant.{}", ev.name(db).as_str()))));
        }
        Definition::Const(c) => {
            format!("const.{}.html", c.name(db)?.as_str())
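The `rewrite_intra_doc_link` change above threads the fragment returned by `filename_and_frag_for_def` through to the final URL, with an explicit `#anchor` in the doc link taking precedence. A minimal sketch of that combination, assuming the `url` crate and made-up base/file/fragment values (this is not the rust-analyzer function itself):

// Sketch only: the helper name and its inputs are invented for illustration.
use url::Url;

fn join_doc_url(
    base: &str,
    file: &str,
    anchor: Option<&str>,
    def_frag: Option<&str>,
) -> Option<String> {
    let mut url = Url::parse(base).ok()?;
    url = url.join(file).ok()?;
    // Mirrors `let frag = anchor.or(frag.as_deref())` in the diff: an explicit
    // `#anchor` written in the doc comment wins over the fragment derived from
    // the resolved definition (e.g. `variant.Bar`).
    url.set_fragment(anchor.or(def_frag));
    Some(url.into())
}

fn main() {
    let url = join_doc_url(
        "https://docs.rs/foo/latest/foo/",
        "enum.Foo.html",
        None,
        Some("variant.Bar"),
    );
    assert_eq!(
        url.as_deref(),
        Some("https://docs.rs/foo/latest/foo/enum.Foo.html#variant.Bar")
    );
}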
@@ -635,12 +635,13 @@ fn filename_and_frag_for_def(
        }
        Definition::Macro(mac) => match mac.kind(db) {
            hir::MacroKind::Declarative
            | hir::MacroKind::BuiltIn
            | hir::MacroKind::AttrBuiltIn
            | hir::MacroKind::DeclarativeBuiltIn
            | hir::MacroKind::Attr
            | hir::MacroKind::ProcMacro => {
                format!("macro.{}.html", mac.name(db).as_str())
            }
            hir::MacroKind::Derive => {
            hir::MacroKind::Derive | hir::MacroKind::DeriveBuiltIn => {
                format!("derive.{}.html", mac.name(db).as_str())
            }
        },
@ -686,3 +686,95 @@ fn rewrite_intra_doc_link_with_anchor() {
|
|||
expect"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rewrite_intra_doc_link_to_associated_item() {
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::bar]
|
||||
pub struct $0Foo;
|
||||
|
||||
impl Foo {
|
||||
fn bar() {}
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::bar]
|
||||
pub struct $0Foo {
|
||||
bar: ()
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::Bar]
|
||||
pub enum $0Foo {
|
||||
Bar
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::BAR]
|
||||
pub struct $0Foo;
|
||||
|
||||
impl Foo {
|
||||
const BAR: () = ();
|
||||
}
|
||||
"#,
|
||||
expect"#
|
||||
]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::bar]
|
||||
pub trait $0Foo {
|
||||
fn bar();
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::Bar]
|
||||
pub trait $0Foo {
|
||||
type Bar;
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [Foo::bar#anchor]
|
||||
pub struct $0Foo {
|
||||
bar: (),
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
check_rewrite(
|
||||
r#"
|
||||
//- /main.rs crate:foo
|
||||
/// [method](Foo::bar)
|
||||
pub struct $0Foo;
|
||||
|
||||
impl Foo {
|
||||
fn bar() {}
|
||||
}
|
||||
"#,
|
||||
expect"#]],
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ pub struct HoverConfig {
|
|||
pub max_fields_count: Option<usize>,
|
||||
pub max_enum_variants_count: Option<usize>,
|
||||
pub max_subst_ty_len: SubstTyLen,
|
||||
pub show_drop_glue: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ use std::{env, mem, ops::Not};
|
|||
|
||||
use either::Either;
|
||||
use hir::{
|
||||
db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind,
|
||||
db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, DropGlue,
|
||||
DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError,
|
||||
MethodViolationCode, Name, Semantics, Symbol, Trait, Type, TypeInfo, VariantDef,
|
||||
};
|
||||
|
|
@ -629,6 +629,89 @@ pub(super) fn definition(
|
|||
_ => None,
|
||||
};
|
||||
|
||||
let drop_info = || {
|
||||
if !config.show_drop_glue {
|
||||
return None;
|
||||
}
|
||||
let drop_info = match def {
|
||||
Definition::Field(field) => {
|
||||
DropInfo { drop_glue: field.ty(db).drop_glue(db), has_dtor: None }
|
||||
}
|
||||
Definition::Adt(Adt::Struct(strukt)) => {
|
||||
let struct_drop_glue = strukt.ty_placeholders(db).drop_glue(db);
|
||||
let mut fields_drop_glue = strukt
|
||||
.fields(db)
|
||||
.iter()
|
||||
.map(|field| field.ty(db).drop_glue(db))
|
||||
.max()
|
||||
.unwrap_or(DropGlue::None);
|
||||
let has_dtor = match (fields_drop_glue, struct_drop_glue) {
|
||||
(DropGlue::None, _) => struct_drop_glue != DropGlue::None,
|
||||
(_, DropGlue::None) => {
|
||||
// This is `ManuallyDrop`.
|
||||
fields_drop_glue = DropGlue::None;
|
||||
false
|
||||
}
|
||||
(_, _) => struct_drop_glue > fields_drop_glue,
|
||||
};
|
||||
DropInfo { drop_glue: fields_drop_glue, has_dtor: Some(has_dtor) }
|
||||
}
|
||||
// Unions cannot have fields with drop glue.
|
||||
Definition::Adt(Adt::Union(union)) => DropInfo {
|
||||
drop_glue: DropGlue::None,
|
||||
has_dtor: Some(union.ty_placeholders(db).drop_glue(db) != DropGlue::None),
|
||||
},
|
||||
Definition::Adt(Adt::Enum(enum_)) => {
|
||||
let enum_drop_glue = enum_.ty_placeholders(db).drop_glue(db);
|
||||
let fields_drop_glue = enum_
|
||||
.variants(db)
|
||||
.iter()
|
||||
.map(|variant| {
|
||||
variant
|
||||
.fields(db)
|
||||
.iter()
|
||||
.map(|field| field.ty(db).drop_glue(db))
|
||||
.max()
|
||||
.unwrap_or(DropGlue::None)
|
||||
})
|
||||
.max()
|
||||
.unwrap_or(DropGlue::None);
|
||||
DropInfo {
|
||||
drop_glue: fields_drop_glue,
|
||||
has_dtor: Some(enum_drop_glue > fields_drop_glue),
|
||||
}
|
||||
}
|
||||
Definition::Variant(variant) => {
|
||||
let fields_drop_glue = variant
|
||||
.fields(db)
|
||||
.iter()
|
||||
.map(|field| field.ty(db).drop_glue(db))
|
||||
.max()
|
||||
.unwrap_or(DropGlue::None);
|
||||
DropInfo { drop_glue: fields_drop_glue, has_dtor: None }
|
||||
}
|
||||
Definition::TypeAlias(type_alias) => {
|
||||
DropInfo { drop_glue: type_alias.ty_placeholders(db).drop_glue(db), has_dtor: None }
|
||||
}
|
||||
Definition::Local(local) => {
|
||||
DropInfo { drop_glue: local.ty(db).drop_glue(db), has_dtor: None }
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
let rendered_drop_glue = match drop_info.drop_glue {
|
||||
DropGlue::None => "does not contain types with destructors (drop glue)",
|
||||
DropGlue::DependOnParams => {
|
||||
"may contain types with destructors (drop glue) depending on type parameters"
|
||||
}
|
||||
DropGlue::HasDropGlue => "contain types with destructors (drop glue)",
|
||||
};
|
||||
Some(match drop_info.has_dtor {
|
||||
Some(true) => format!("{}; has a destructor", rendered_drop_glue),
|
||||
Some(false) => format!("{}; doesn't have a destructor", rendered_drop_glue),
|
||||
None => rendered_drop_glue.to_owned(),
|
||||
})
|
||||
};
|
||||
|
||||
let dyn_compatibility_info = || match def {
|
||||
Definition::Trait(it) => {
|
||||
let mut dyn_compatibility_info = String::new();
|
||||
|
|
@ -661,6 +744,10 @@ pub(super) fn definition(
|
|||
extra.push_str("\n___\n");
|
||||
extra.push_str(&dyn_compatibility_info);
|
||||
}
|
||||
if let Some(drop_info) = drop_info() {
|
||||
extra.push_str("\n___\n");
|
||||
extra.push_str(&drop_info);
|
||||
}
|
||||
}
|
||||
let mut desc = String::new();
|
||||
desc.push_str(&label);
|
||||
|
|
@ -703,6 +790,12 @@ pub(super) fn definition(
|
|||
)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct DropInfo {
|
||||
drop_glue: DropGlue,
|
||||
has_dtor: Option<bool>,
|
||||
}
|
||||
|
||||
pub(super) fn literal(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
token: SyntaxToken,
|
||||
|
|
|
File diff suppressed because it is too large
@ -258,27 +258,25 @@ fn mode_and_needs_parens_for_adjustment_hints(
|
|||
fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) {
|
||||
let prec = expr.precedence();
|
||||
if postfix {
|
||||
// postfix ops have higher precedence than any other operator, so we need to wrap
|
||||
// any inner expression that is below (except for jumps if they don't have a value)
|
||||
let needs_inner_parens = prec < ExprPrecedence::Unambiguous && {
|
||||
prec != ExprPrecedence::Jump || !expr.is_ret_like_with_no_value()
|
||||
};
|
||||
let needs_inner_parens = prec.needs_parentheses_in(ExprPrecedence::Postfix);
|
||||
// given we are the higher precedence, no parent expression will have stronger requirements
|
||||
let needs_outer_parens = false;
|
||||
(needs_outer_parens, needs_inner_parens)
|
||||
} else {
|
||||
// We need to wrap all binary like things, thats everything below prefix except for jumps
|
||||
let needs_inner_parens = prec < ExprPrecedence::Prefix && prec != ExprPrecedence::Jump;
|
||||
let needs_inner_parens = prec.needs_parentheses_in(ExprPrecedence::Prefix);
|
||||
let parent = expr
|
||||
.syntax()
|
||||
.parent()
|
||||
.and_then(ast::Expr::cast)
|
||||
// if we are already wrapped, great, no need to wrap again
|
||||
.filter(|it| !matches!(it, ast::Expr::ParenExpr(_)))
|
||||
.map(|it| it.precedence());
|
||||
.map(|it| it.precedence())
|
||||
.filter(|&prec| prec != ExprPrecedence::Unambiguous);
|
||||
|
||||
// if we have no parent, we don't need outer parens to disambiguate
|
||||
// otherwise anything with higher precedence than what we insert needs to wrap us
|
||||
let needs_outer_parens = parent.is_some_and(|prec| prec > ExprPrecedence::Prefix);
|
||||
let needs_outer_parens = parent
|
||||
.is_some_and(|parent_prec| ExprPrecedence::Prefix.needs_parentheses_in(parent_prec));
|
||||
(needs_outer_parens, needs_inner_parens)
|
||||
}
|
||||
}
|
||||
|
|
@ -291,7 +289,7 @@ mod tests {
|
|||
};
|
||||
|
||||
#[test]
|
||||
fn adjustment_hints() {
|
||||
fn adjustment_hints_prefix() {
|
||||
check_with_config(
|
||||
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
|
||||
r#"
|
||||
|
|
@ -381,6 +379,8 @@ fn main() {
|
|||
&mut Struct[0];
|
||||
//^^^^^^(&mut $
|
||||
//^^^^^^)
|
||||
let _: (&mut (),) = (&mut (),);
|
||||
//^^^^^^^&mut *
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
|
@ -472,6 +472,9 @@ fn main() {
|
|||
//^^^^^^.&
|
||||
&mut Struct[0];
|
||||
//^^^^^^.&mut
|
||||
let _: (&mut (),) = (&mut (),);
|
||||
//^^^^^^^(
|
||||
//^^^^^^^).*.&mut
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
|
|
|
|||
|
|
@ -104,7 +104,7 @@ pub use crate::{
|
|||
navigation_target::{NavigationTarget, TryToNav, UpmappingResult},
|
||||
references::ReferenceSearchResult,
|
||||
rename::RenameError,
|
||||
runnables::{Runnable, RunnableKind, TestId},
|
||||
runnables::{Runnable, RunnableKind, TestId, UpdateTest},
|
||||
signature_help::SignatureHelp,
|
||||
static_index::{
|
||||
StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, VendoredLibrariesConfig,
|
||||
|
|
|
|||
|
|
@ -184,11 +184,11 @@ pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformati
|
|||
|
||||
match def {
|
||||
Definition::Macro(it) => match it.kind(db) {
|
||||
MacroKind::Declarative => Macro,
|
||||
MacroKind::Derive => Attribute,
|
||||
MacroKind::BuiltIn => Macro,
|
||||
MacroKind::Attr => Attribute,
|
||||
MacroKind::ProcMacro => Macro,
|
||||
MacroKind::Derive
|
||||
| MacroKind::DeriveBuiltIn
|
||||
| MacroKind::AttrBuiltIn
|
||||
| MacroKind::Attr => Attribute,
|
||||
MacroKind::Declarative | MacroKind::DeclarativeBuiltIn | MacroKind::ProcMacro => Macro,
|
||||
},
|
||||
Definition::Field(..) | Definition::TupleField(..) => Field,
|
||||
Definition::Module(..) | Definition::Crate(..) => Module,
|
||||
|
|
|
|||
|
|
@ -187,6 +187,7 @@ impl StaticIndex<'_> {
|
|||
max_fields_count: Some(5),
|
||||
max_enum_variants_count: Some(5),
|
||||
max_subst_ty_len: SubstTyLen::Unlimited,
|
||||
show_drop_glue: true,
|
||||
};
|
||||
let tokens = tokens.filter(|token| {
|
||||
matches!(
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ mod escape;
|
|||
mod format;
|
||||
mod highlight;
|
||||
mod inject;
|
||||
mod macro_;
|
||||
|
||||
mod html;
|
||||
#[cfg(test)]
|
||||
|
|
@ -15,14 +14,14 @@ mod tests;
|
|||
|
||||
use std::ops::ControlFlow;
|
||||
|
||||
use hir::{InRealFile, Name, Semantics};
|
||||
use hir::{HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics};
|
||||
use ide_db::{FxHashMap, Ranker, RootDatabase, SymbolKind};
|
||||
use span::EditionedFileId;
|
||||
use syntax::{
|
||||
ast::{self, IsString},
|
||||
AstNode, AstToken, NodeOrToken,
|
||||
SyntaxKind::*,
|
||||
SyntaxNode, TextRange, WalkEvent, T,
|
||||
SyntaxNode, SyntaxToken, TextRange, WalkEvent, T,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
|
|
@ -30,7 +29,6 @@ use crate::{
|
|||
escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string},
|
||||
format::highlight_format_string,
|
||||
highlights::Highlights,
|
||||
macro_::MacroHighlighter,
|
||||
tags::Highlight,
|
||||
},
|
||||
FileId, HlMod, HlOperator, HlPunct, HlTag,
|
||||
|
|
@ -221,7 +219,7 @@ pub(crate) fn highlight(
|
|||
Some(it) => it.krate(),
|
||||
None => return hl.to_vec(),
|
||||
};
|
||||
traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
|
||||
traverse(&mut hl, &sema, config, InRealFile::new(file_id, &root), krate, range_to_highlight);
|
||||
hl.to_vec()
|
||||
}
|
||||
|
||||
|
|
@ -229,8 +227,7 @@ fn traverse(
|
|||
hl: &mut Highlights,
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
config: HighlightConfig,
|
||||
file_id: EditionedFileId,
|
||||
root: &SyntaxNode,
|
||||
InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>,
|
||||
krate: hir::Crate,
|
||||
range_to_highlight: TextRange,
|
||||
) {
|
||||
|
|
@ -252,18 +249,15 @@ fn traverse(
|
|||
|
||||
let mut tt_level = 0;
|
||||
let mut attr_or_derive_item = None;
|
||||
let mut current_macro: Option<ast::Macro> = None;
|
||||
let mut macro_highlighter = MacroHighlighter::default();
|
||||
|
||||
// FIXME: these are not perfectly accurate, we determine them by the real file's syntax tree
|
||||
// an attribute nested in a macro call will not emit `inside_attribute`
|
||||
let mut inside_attribute = false;
|
||||
let mut inside_macro_call = false;
|
||||
let mut inside_proc_macro_call = false;
|
||||
|
||||
// Walk all nodes, keeping track of whether we are inside a macro or not.
|
||||
// If in macro, expand it first and highlight the expanded code.
|
||||
for event in root.preorder_with_tokens() {
|
||||
let mut preorder = root.preorder_with_tokens();
|
||||
while let Some(event) = preorder.next() {
|
||||
use WalkEvent::{Enter, Leave};
|
||||
|
||||
let range = match &event {
|
||||
|
|
@ -275,16 +269,11 @@ fn traverse(
|
|||
continue;
|
||||
}
|
||||
|
||||
// set macro and attribute highlighting states
|
||||
match event.clone() {
|
||||
Enter(NodeOrToken::Node(node))
|
||||
if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
|
||||
{
|
||||
Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
|
||||
tt_level += 1;
|
||||
}
|
||||
Leave(NodeOrToken::Node(node))
|
||||
if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
|
||||
{
|
||||
Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
|
||||
tt_level -= 1;
|
||||
}
|
||||
Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
|
||||
|
|
@ -297,28 +286,14 @@ fn traverse(
|
|||
Enter(NodeOrToken::Node(node)) => {
|
||||
if let Some(item) = ast::Item::cast(node.clone()) {
|
||||
match item {
|
||||
ast::Item::MacroRules(mac) => {
|
||||
macro_highlighter.init();
|
||||
current_macro = Some(mac.into());
|
||||
continue;
|
||||
}
|
||||
ast::Item::MacroDef(mac) => {
|
||||
macro_highlighter.init();
|
||||
current_macro = Some(mac.into());
|
||||
continue;
|
||||
}
|
||||
ast::Item::Fn(_) | ast::Item::Const(_) | ast::Item::Static(_) => {
|
||||
bindings_shadow_count.clear()
|
||||
}
|
||||
ast::Item::MacroCall(ref macro_call) => {
|
||||
inside_macro_call = true;
|
||||
inside_proc_macro_call = sema.is_proc_macro_call(macro_call);
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
if attr_or_derive_item.is_none() {
|
||||
if sema.is_attr_macro_call(&item) {
|
||||
if sema.is_attr_macro_call(InFile::new(file_id.into(), &item)) {
|
||||
attr_or_derive_item = Some(AttrOrDerive::Attr(item));
|
||||
} else {
|
||||
let adt = match item {
|
||||
|
|
@ -328,7 +303,10 @@ fn traverse(
|
|||
_ => None,
|
||||
};
|
||||
match adt {
|
||||
Some(adt) if sema.is_derive_annotated(&adt) => {
|
||||
Some(adt)
|
||||
if sema
|
||||
.is_derive_annotated(InFile::new(file_id.into(), &adt)) =>
|
||||
{
|
||||
attr_or_derive_item =
|
||||
Some(AttrOrDerive::Derive(ast::Item::from(adt)));
|
||||
}
|
||||
|
|
@ -340,25 +318,11 @@ fn traverse(
|
|||
}
|
||||
Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
|
||||
match ast::Item::cast(node.clone()) {
|
||||
Some(ast::Item::MacroRules(mac)) => {
|
||||
assert_eq!(current_macro, Some(mac.into()));
|
||||
current_macro = None;
|
||||
macro_highlighter = MacroHighlighter::default();
|
||||
}
|
||||
Some(ast::Item::MacroDef(mac)) => {
|
||||
assert_eq!(current_macro, Some(mac.into()));
|
||||
current_macro = None;
|
||||
macro_highlighter = MacroHighlighter::default();
|
||||
}
|
||||
Some(item)
|
||||
if attr_or_derive_item.as_ref().is_some_and(|it| *it.item() == item) =>
|
||||
{
|
||||
attr_or_derive_item = None;
|
||||
}
|
||||
Some(ast::Item::MacroCall(_)) => {
|
||||
inside_macro_call = false;
|
||||
inside_proc_macro_call = false;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
|
@ -379,12 +343,6 @@ fn traverse(
|
|||
}
|
||||
};
|
||||
|
||||
if current_macro.is_some() {
|
||||
if let Some(tok) = element.as_token() {
|
||||
macro_highlighter.advance(tok);
|
||||
}
|
||||
}
|
||||
|
||||
let element = match element.clone() {
|
||||
NodeOrToken::Node(n) => match ast::NameLike::cast(n) {
|
||||
Some(n) => NodeOrToken::Node(n),
|
||||
|
|
@ -392,7 +350,7 @@ fn traverse(
|
|||
},
|
||||
NodeOrToken::Token(t) => NodeOrToken::Token(t),
|
||||
};
|
||||
let token = element.as_token().cloned();
|
||||
let original_token = element.as_token().cloned();
|
||||
|
||||
// Descending tokens into macros is expensive even if no descending occurs, so make sure
|
||||
// that we actually are in a position where descending is possible.
|
||||
|
|
@ -405,144 +363,52 @@ fn traverse(
|
|||
|
||||
let descended_element = if in_macro {
|
||||
// Attempt to descend tokens into macro-calls.
|
||||
let res = match element {
|
||||
NodeOrToken::Token(token) if token.kind() != COMMENT => {
|
||||
let ranker = Ranker::from_token(&token);
|
||||
|
||||
let mut t = None;
|
||||
let mut r = 0;
|
||||
sema.descend_into_macros_breakable(
|
||||
InRealFile::new(file_id, token.clone()),
|
||||
|tok, _ctx| {
|
||||
// FIXME: Consider checking ctx transparency for being opaque?
|
||||
let tok = tok.value;
|
||||
let my_rank = ranker.rank_token(&tok);
|
||||
|
||||
if my_rank >= Ranker::MAX_RANK {
|
||||
// a rank of 0b1110 means that we have found a maximally interesting
|
||||
// token so stop early.
|
||||
t = Some(tok);
|
||||
return ControlFlow::Break(());
|
||||
}
|
||||
|
||||
// r = r.max(my_rank);
|
||||
// t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
|
||||
match &mut t {
|
||||
Some(prev) if r < my_rank => {
|
||||
*prev = tok;
|
||||
r = my_rank;
|
||||
}
|
||||
Some(_) => (),
|
||||
None => {
|
||||
r = my_rank;
|
||||
t = Some(tok)
|
||||
}
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
},
|
||||
);
|
||||
|
||||
let token = t.unwrap_or(token);
|
||||
match token.parent().and_then(ast::NameLike::cast) {
|
||||
// Remap the token into the wrapping single token nodes
|
||||
Some(parent) => match (token.kind(), parent.syntax().kind()) {
|
||||
(T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
|
||||
(T![self] | T![super] | T![crate] | T![Self], NAME_REF) => {
|
||||
NodeOrToken::Node(parent)
|
||||
}
|
||||
(INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
|
||||
(LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
|
||||
_ => NodeOrToken::Token(token),
|
||||
},
|
||||
None => NodeOrToken::Token(token),
|
||||
}
|
||||
}
|
||||
e => e,
|
||||
};
|
||||
res
|
||||
match element {
|
||||
NodeOrToken::Token(token) => descend_token(sema, InRealFile::new(file_id, token)),
|
||||
n => InFile::new(file_id.into(), n),
|
||||
}
|
||||
} else {
|
||||
element
|
||||
InFile::new(file_id.into(), element)
|
||||
};
|
||||
|
||||
// FIXME: do proper macro def highlighting https://github.com/rust-lang/rust-analyzer/issues/6232
|
||||
// Skip metavariables from being highlighted to prevent keyword highlighting in them
|
||||
if descended_element.as_token().and_then(|t| macro_highlighter.highlight(t)).is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// string highlight injections, note this does not use the descended element as proc-macros
|
||||
// can rewrite string literals which invalidates our indices
|
||||
if let (Some(token), Some(descended_token)) = (token, descended_element.as_token()) {
|
||||
if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind())
|
||||
{
|
||||
let string = ast::String::cast(token);
|
||||
let string_to_highlight = ast::String::cast(descended_token.clone());
|
||||
if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
|
||||
if string.is_raw()
|
||||
&& inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
|
||||
{
|
||||
continue;
|
||||
}
|
||||
highlight_format_string(
|
||||
hl,
|
||||
sema,
|
||||
krate,
|
||||
&string,
|
||||
&expanded_string,
|
||||
range,
|
||||
file_id.edition(),
|
||||
);
|
||||
|
||||
if !string.is_raw() {
|
||||
highlight_escape_string(hl, &string, range.start());
|
||||
}
|
||||
}
|
||||
} else if ast::ByteString::can_cast(token.kind())
|
||||
&& ast::ByteString::can_cast(descended_token.kind())
|
||||
{
|
||||
if let Some(byte_string) = ast::ByteString::cast(token) {
|
||||
if !byte_string.is_raw() {
|
||||
highlight_escape_string(hl, &byte_string, range.start());
|
||||
}
|
||||
}
|
||||
} else if ast::CString::can_cast(token.kind())
|
||||
&& ast::CString::can_cast(descended_token.kind())
|
||||
{
|
||||
if let Some(c_string) = ast::CString::cast(token) {
|
||||
if !c_string.is_raw() {
|
||||
highlight_escape_string(hl, &c_string, range.start());
|
||||
}
|
||||
}
|
||||
} else if ast::Char::can_cast(token.kind())
|
||||
&& ast::Char::can_cast(descended_token.kind())
|
||||
{
|
||||
let Some(char) = ast::Char::cast(token) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
highlight_escape_char(hl, &char, range.start())
|
||||
} else if ast::Byte::can_cast(token.kind())
|
||||
&& ast::Byte::can_cast(descended_token.kind())
|
||||
{
|
||||
let Some(byte) = ast::Byte::cast(token) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
highlight_escape_byte(hl, &byte, range.start())
|
||||
// string highlight injections
|
||||
if let (Some(original_token), Some(descended_token)) =
|
||||
(original_token, descended_element.value.as_token())
|
||||
{
|
||||
let control_flow = string_injections(
|
||||
hl,
|
||||
sema,
|
||||
config,
|
||||
file_id,
|
||||
krate,
|
||||
original_token,
|
||||
descended_token,
|
||||
);
|
||||
if control_flow.is_break() {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let element = match descended_element {
|
||||
NodeOrToken::Node(name_like) => highlight::name_like(
|
||||
sema,
|
||||
krate,
|
||||
&mut bindings_shadow_count,
|
||||
config.syntactic_name_ref_highlighting,
|
||||
name_like,
|
||||
file_id.edition(),
|
||||
),
|
||||
let edition = descended_element.file_id.edition(sema.db);
|
||||
let element = match descended_element.value {
|
||||
NodeOrToken::Node(name_like) => {
|
||||
let hl = highlight::name_like(
|
||||
sema,
|
||||
krate,
|
||||
&mut bindings_shadow_count,
|
||||
config.syntactic_name_ref_highlighting,
|
||||
name_like,
|
||||
edition,
|
||||
);
|
||||
if hl.is_some() && !in_macro {
|
||||
// skip highlighting the contained token of our name-like node
|
||||
// as that would potentially overwrite our result
|
||||
preorder.skip_subtree();
|
||||
}
|
||||
hl
|
||||
}
|
||||
NodeOrToken::Token(token) => {
|
||||
highlight::token(sema, token, file_id.edition()).zip(Some(None))
|
||||
highlight::token(sema, token, edition, tt_level > 0).zip(Some(None))
|
||||
}
|
||||
};
|
||||
if let Some((mut highlight, binding_hash)) = element {
|
||||
|
|
@ -551,13 +417,6 @@ fn traverse(
|
|||
// let the editor do its highlighting for these tokens instead
|
||||
continue;
|
||||
}
|
||||
if highlight.tag == HlTag::UnresolvedReference
|
||||
&& matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
|
||||
{
|
||||
// do not emit unresolved references in derive helpers if the token mapping maps to
|
||||
// something unresolvable. FIXME: There should be a way to prevent that
|
||||
continue;
|
||||
}
|
||||
|
||||
// apply config filtering
|
||||
if !filter_by_config(&mut highlight, config) {
|
||||
|
|
@ -567,8 +426,9 @@ fn traverse(
|
|||
if inside_attribute {
|
||||
highlight |= HlMod::Attribute
|
||||
}
|
||||
if inside_macro_call && tt_level > 0 {
|
||||
if inside_proc_macro_call {
|
||||
if let Some(m) = descended_element.file_id.macro_file() {
|
||||
if let MacroKind::ProcMacro | MacroKind::Attr | MacroKind::Derive = m.kind(sema.db)
|
||||
{
|
||||
highlight |= HlMod::ProcMacro
|
||||
}
|
||||
highlight |= HlMod::Macro
|
||||
|
|
@ -579,6 +439,99 @@ fn traverse(
|
|||
}
|
||||
}
|
||||
|
||||
fn string_injections(
|
||||
hl: &mut Highlights,
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
config: HighlightConfig,
|
||||
file_id: EditionedFileId,
|
||||
krate: hir::Crate,
|
||||
token: SyntaxToken,
|
||||
descended_token: &SyntaxToken,
|
||||
) -> ControlFlow<()> {
|
||||
if !matches!(token.kind(), STRING | BYTE_STRING | BYTE | CHAR | C_STRING) {
|
||||
return ControlFlow::Continue(());
|
||||
}
|
||||
if let Some(string) = ast::String::cast(token.clone()) {
|
||||
if let Some(descended_string) = ast::String::cast(descended_token.clone()) {
|
||||
if string.is_raw()
|
||||
&& inject::ra_fixture(hl, sema, config, &string, &descended_string).is_some()
|
||||
{
|
||||
return ControlFlow::Break(());
|
||||
}
|
||||
highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition());
|
||||
|
||||
if !string.is_raw() {
|
||||
highlight_escape_string(hl, &string);
|
||||
}
|
||||
}
|
||||
} else if let Some(byte_string) = ast::ByteString::cast(token.clone()) {
|
||||
if !byte_string.is_raw() {
|
||||
highlight_escape_string(hl, &byte_string);
|
||||
}
|
||||
} else if let Some(c_string) = ast::CString::cast(token.clone()) {
|
||||
if !c_string.is_raw() {
|
||||
highlight_escape_string(hl, &c_string);
|
||||
}
|
||||
} else if let Some(char) = ast::Char::cast(token.clone()) {
|
||||
highlight_escape_char(hl, &char)
|
||||
} else if let Some(byte) = ast::Byte::cast(token) {
|
||||
highlight_escape_byte(hl, &byte)
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
|
||||
fn descend_token(
|
||||
sema: &Semantics<'_, RootDatabase>,
|
||||
token: InRealFile<SyntaxToken>,
|
||||
) -> InFile<NodeOrToken<ast::NameLike, SyntaxToken>> {
|
||||
if token.value.kind() == COMMENT {
|
||||
return token.map(NodeOrToken::Token).into();
|
||||
}
|
||||
let ranker = Ranker::from_token(&token.value);
|
||||
|
||||
let mut t = None;
|
||||
let mut r = 0;
|
||||
sema.descend_into_macros_breakable(token.clone(), |tok, _ctx| {
|
||||
// FIXME: Consider checking ctx transparency for being opaque?
|
||||
let my_rank = ranker.rank_token(&tok.value);
|
||||
|
||||
if my_rank >= Ranker::MAX_RANK {
|
||||
// a rank of 0b1110 means that we have found a maximally interesting
|
||||
// token so stop early.
|
||||
t = Some(tok);
|
||||
return ControlFlow::Break(());
|
||||
}
|
||||
|
||||
// r = r.max(my_rank);
|
||||
// t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
|
||||
match &mut t {
|
||||
Some(prev) if r < my_rank => {
|
||||
*prev = tok;
|
||||
r = my_rank;
|
||||
}
|
||||
Some(_) => (),
|
||||
None => {
|
||||
r = my_rank;
|
||||
t = Some(tok)
|
||||
}
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
});
|
||||
|
||||
let token = t.unwrap_or_else(|| token.into());
|
||||
token.map(|token| match token.parent().and_then(ast::NameLike::cast) {
|
||||
// Remap the token into the wrapping single token nodes
|
||||
Some(parent) => match (token.kind(), parent.syntax().kind()) {
|
||||
(T![ident] | T![self], NAME)
|
||||
| (T![ident] | T![self] | T![super] | T![crate] | T![Self], NAME_REF)
|
||||
| (INT_NUMBER, NAME_REF)
|
||||
| (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
|
||||
_ => NodeOrToken::Token(token),
|
||||
},
|
||||
None => NodeOrToken::Token(token),
|
||||
})
|
||||
}
|
||||
|
||||
fn filter_by_config(highlight: &mut Highlight, config: HighlightConfig) -> bool {
|
||||
match &mut highlight.tag {
|
||||
HlTag::StringLiteral if !config.strings => return false,
|
||||
|
|
|
|||
|
|
@ -4,12 +4,9 @@ use crate::{HlRange, HlTag};
|
|||
use syntax::ast::{Byte, Char, IsString};
|
||||
use syntax::{AstToken, TextRange, TextSize};
|
||||
|
||||
pub(super) fn highlight_escape_string<T: IsString>(
|
||||
stack: &mut Highlights,
|
||||
string: &T,
|
||||
start: TextSize,
|
||||
) {
|
||||
pub(super) fn highlight_escape_string<T: IsString>(stack: &mut Highlights, string: &T) {
|
||||
let text = string.text();
|
||||
let start = string.syntax().text_range().start();
|
||||
string.escaped_char_ranges(&mut |piece_range, char| {
|
||||
if text[piece_range.start().into()..].starts_with('\\') {
|
||||
let highlight = match char {
|
||||
|
|
@ -25,7 +22,7 @@ pub(super) fn highlight_escape_string<T: IsString>(
|
|||
});
|
||||
}
|
||||
|
||||
pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start: TextSize) {
|
||||
pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) {
|
||||
if char.value().is_err() {
|
||||
// We do not emit invalid escapes highlighting here. The lexer would likely be in a bad
|
||||
// state and this token contains junk, since `'` is not a reliable delimiter (consider
|
||||
|
|
@ -42,11 +39,14 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
|
|||
return;
|
||||
};
|
||||
|
||||
let range = TextRange::at(start + TextSize::from(1), TextSize::from(text.len() as u32));
|
||||
let range = TextRange::at(
|
||||
char.syntax().text_range().start() + TextSize::from(1),
|
||||
TextSize::from(text.len() as u32),
|
||||
);
|
||||
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
|
||||
}
|
||||
|
||||
pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start: TextSize) {
|
||||
pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) {
|
||||
if byte.value().is_err() {
|
||||
// See `highlight_escape_char` for why no error highlighting here.
|
||||
return;
|
||||
|
|
@ -61,6 +61,9 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start:
|
|||
return;
|
||||
};
|
||||
|
||||
let range = TextRange::at(start + TextSize::from(2), TextSize::from(text.len() as u32));
|
||||
let range = TextRange::at(
|
||||
byte.syntax().text_range().start() + TextSize::from(2),
|
||||
TextSize::from(text.len() as u32),
|
||||
);
|
||||
stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ use ide_db::{
|
|||
SymbolKind,
|
||||
};
|
||||
use span::Edition;
|
||||
use syntax::{ast, TextRange};
|
||||
use syntax::{ast, AstToken};
|
||||
|
||||
use crate::{
|
||||
syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
|
||||
|
|
@ -18,15 +18,15 @@ pub(super) fn highlight_format_string(
|
|||
krate: hir::Crate,
|
||||
string: &ast::String,
|
||||
expanded_string: &ast::String,
|
||||
range: TextRange,
|
||||
edition: Edition,
|
||||
) {
|
||||
if is_format_string(expanded_string) {
|
||||
let start = string.syntax().text_range().start();
|
||||
// FIXME: Replace this with the HIR info we have now.
|
||||
lex_format_specifiers(string, &mut |piece_range, kind| {
|
||||
if let Some(highlight) = highlight_format_specifier(kind) {
|
||||
stack.add(HlRange {
|
||||
range: piece_range + range.start(),
|
||||
range: piece_range + start,
|
||||
highlight: highlight.into(),
|
||||
binding_hash: None,
|
||||
});
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ pub(super) fn token(
|
|||
sema: &Semantics<'_, RootDatabase>,
|
||||
token: SyntaxToken,
|
||||
edition: Edition,
|
||||
in_tt: bool,
|
||||
) -> Option<Highlight> {
|
||||
if let Some(comment) = ast::Comment::cast(token.clone()) {
|
||||
let h = HlTag::Comment;
|
||||
|
|
@ -40,13 +41,20 @@ pub(super) fn token(
|
|||
INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
|
||||
BYTE => HlTag::ByteLiteral.into(),
|
||||
CHAR => HlTag::CharLiteral.into(),
|
||||
IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => {
|
||||
IDENT if in_tt => {
|
||||
// from this point on we are inside a token tree, this only happens for identifiers
|
||||
// that were not mapped down into macro invocations
|
||||
HlTag::None.into()
|
||||
}
|
||||
p if p.is_punct() => punctuation(sema, token, p),
|
||||
k if k.is_keyword(edition) => keyword(sema, token, k)?,
|
||||
k if k.is_keyword(edition) => {
|
||||
if in_tt && token.prev_token().is_some_and(|t| t.kind() == T![$]) {
|
||||
// we are likely within a macro definition where our keyword is a fragment name
|
||||
HlTag::None.into()
|
||||
} else {
|
||||
keyword(sema, token, k)?
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
Some(highlight)
|
||||
|
|
@ -81,7 +89,7 @@ pub(super) fn name_like(
|
|||
Some(IdentClass::NameRefClass(NameRefClass::Definition(def, _))) => {
|
||||
highlight_def(sema, krate, def, edition)
|
||||
}
|
||||
// FIXME: Fallback for 'static and '_, as we do not resolve these yet
|
||||
// FIXME: Fallback for '_, as we do not resolve these yet
|
||||
_ => SymbolKind::LifetimeParam.into(),
|
||||
},
|
||||
};
|
||||
|
|
@ -214,12 +222,6 @@ fn keyword(
|
|||
T![true] | T![false] => HlTag::BoolLiteral.into(),
|
||||
// crate is handled just as a token if it's in an `extern crate`
|
||||
T![crate] if parent_matches::<ast::ExternCrate>(&token) => h,
|
||||
// self, crate, super and `Self` are handled as either a Name or NameRef already, unless they
|
||||
// are inside unmapped token trees
|
||||
T![self] | T![crate] | T![super] | T![Self] if parent_matches::<ast::NameRef>(&token) => {
|
||||
return None
|
||||
}
|
||||
T![self] if parent_matches::<ast::Name>(&token) => return None,
|
||||
T![ref] => match token.parent().and_then(ast::IdentPat::cast) {
|
||||
Some(ident) if sema.is_unsafe_ident_pat(&ident) => h | HlMod::Unsafe,
|
||||
_ => h,
|
||||
|
|
|
|||
|
|
@ -1,128 +0,0 @@
|
|||
//! Syntax highlighting for macro_rules!.
|
||||
use syntax::{SyntaxKind, SyntaxToken, TextRange, T};
|
||||
|
||||
use crate::{HlRange, HlTag};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(super) struct MacroHighlighter {
|
||||
state: Option<MacroMatcherParseState>,
|
||||
}
|
||||
|
||||
impl MacroHighlighter {
|
||||
pub(super) fn init(&mut self) {
|
||||
self.state = Some(MacroMatcherParseState::default());
|
||||
}
|
||||
|
||||
pub(super) fn advance(&mut self, token: &SyntaxToken) {
|
||||
if let Some(state) = self.state.as_mut() {
|
||||
update_macro_state(state, token);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn highlight(&self, token: &SyntaxToken) -> Option<HlRange> {
|
||||
if let Some(state) = self.state.as_ref() {
|
||||
if matches!(state.rule_state, RuleState::Matcher | RuleState::Expander) {
|
||||
if let Some(range) = is_metavariable(token) {
|
||||
return Some(HlRange {
|
||||
range,
|
||||
highlight: HlTag::UnresolvedReference.into(),
|
||||
binding_hash: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
struct MacroMatcherParseState {
|
||||
/// Opening and corresponding closing bracket of the matcher or expander of the current rule
|
||||
paren_ty: Option<(SyntaxKind, SyntaxKind)>,
|
||||
paren_level: usize,
|
||||
rule_state: RuleState,
|
||||
/// Whether we are inside the outer `{` `}` macro block that holds the rules
|
||||
in_invoc_body: bool,
|
||||
}
|
||||
|
||||
impl Default for MacroMatcherParseState {
|
||||
fn default() -> Self {
|
||||
MacroMatcherParseState {
|
||||
paren_ty: None,
|
||||
paren_level: 0,
|
||||
in_invoc_body: false,
|
||||
rule_state: RuleState::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
enum RuleState {
|
||||
Matcher,
|
||||
Expander,
|
||||
Between,
|
||||
None,
|
||||
}
|
||||
|
||||
impl RuleState {
|
||||
fn transition(&mut self) {
|
||||
*self = match self {
|
||||
RuleState::Matcher => RuleState::Between,
|
||||
RuleState::Expander => RuleState::None,
|
||||
RuleState::Between => RuleState::Expander,
|
||||
RuleState::None => RuleState::Matcher,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) {
|
||||
if !state.in_invoc_body {
|
||||
if tok.kind() == T!['{'] || tok.kind() == T!['('] {
|
||||
state.in_invoc_body = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
match state.paren_ty {
|
||||
Some((open, close)) => {
|
||||
if tok.kind() == open {
|
||||
state.paren_level += 1;
|
||||
} else if tok.kind() == close {
|
||||
state.paren_level -= 1;
|
||||
if state.paren_level == 0 {
|
||||
state.rule_state.transition();
|
||||
state.paren_ty = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
match tok.kind() {
|
||||
T!['('] => {
|
||||
state.paren_ty = Some((T!['('], T![')']));
|
||||
}
|
||||
T!['{'] => {
|
||||
state.paren_ty = Some((T!['{'], T!['}']));
|
||||
}
|
||||
T!['['] => {
|
||||
state.paren_ty = Some((T!['['], T![']']));
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
if state.paren_ty.is_some() {
|
||||
state.paren_level = 1;
|
||||
state.rule_state.transition();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn is_metavariable(token: &SyntaxToken) -> Option<TextRange> {
|
||||
match token.kind() {
|
||||
kind if kind.is_any_identifier() => {
|
||||
if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) {
|
||||
return Some(token.text_range());
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
None
|
||||
}
|
||||
|
|
@ -49,26 +49,26 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
<span class="keyword unsafe">unsafe</span> <span class="brace">{</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">foo</span> <span class="operator">=</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
|
||||
<span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">o</span> <span class="operator">=</span> <span class="numeric_literal">0</span><span class="semicolon">;</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span>
|
||||
<span class="string_literal macro">"%input = OpLoad _ {</span><span class="variable">0</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
|
||||
<span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"%result = "</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="string_literal macro">" _ %input"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span>
|
||||
<span class="string_literal macro">"OpStore {</span><span class="variable">1</span><span class="string_literal macro">} %result"</span><span class="comma macro">,</span>
|
||||
<span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="operator macro">&</span><span class="variable macro">foo</span><span class="comma macro">,</span>
|
||||
<span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="operator macro">&</span><span class="keyword macro">mut</span> <span class="variable macro mutable">o</span><span class="comma macro">,</span>
|
||||
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
<span class="parenthesis">)</span><span class="semicolon">;</span>
|
||||
|
||||
<span class="keyword">let</span> <span class="variable declaration">thread_id</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"
|
||||
mov {</span><span class="variable">0</span><span class="string_literal macro">}, gs:[0x30]
|
||||
mov {</span><span class="variable">0</span><span class="string_literal macro">}, [{</span><span class="variable">0</span><span class="string_literal macro">}+0x48]
|
||||
"</span><span class="comma macro">,</span> <span class="keyword macro">out</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">thread_id</span><span class="comma macro">,</span> <span class="keyword macro">options</span><span class="parenthesis macro">(</span><span class="keyword macro">pure</span><span class="comma macro">,</span> <span class="keyword macro">readonly</span><span class="comma macro">,</span> <span class="keyword macro">nostack</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
"</span><span class="comma macro">,</span> <span class="keyword macro">out</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">thread_id</span><span class="comma macro">,</span> <span class="keyword macro">options</span><span class="parenthesis macro">(</span><span class="keyword macro">pure</span><span class="comma macro">,</span> <span class="keyword macro">readonly</span><span class="comma macro">,</span> <span class="keyword macro">nostack</span><span class="parenthesis macro">)</span><span class="parenthesis">)</span><span class="semicolon">;</span>
|
||||
|
||||
<span class="keyword">static</span> <span class="static declaration">UNMAP_BASE</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="keyword const">const</span> <span class="constant const declaration">MEM_RELEASE</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="keyword">static</span> <span class="static declaration">VirtualFree</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="keyword const">const</span> <span class="constant const declaration">OffPtr</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="keyword const">const</span> <span class="constant const declaration">OffFn</span><span class="colon">:</span> <span class="builtin_type">usize</span><span class="semicolon">;</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="string_literal macro">"
|
||||
push {</span><span class="variable">free_type</span><span class="string_literal macro">}
|
||||
push {</span><span class="variable">free_size</span><span class="string_literal macro">}
|
||||
push {</span><span class="variable">base</span><span class="string_literal macro">}
|
||||
|
|
@ -92,7 +92,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
|
||||
<span class="variable declaration macro">base</span> <span class="operator macro">=</span> <span class="keyword macro">sym</span> <span class="static macro">UNMAP_BASE</span><span class="comma macro">,</span>
|
||||
<span class="keyword macro">options</span><span class="parenthesis macro">(</span><span class="keyword macro">noreturn</span><span class="parenthesis macro">)</span><span class="comma macro">,</span>
|
||||
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
|
||||
<span class="parenthesis">)</span><span class="semicolon">;</span>
|
||||
<span class="brace">}</span>
|
||||
<span class="brace">}</span>
|
||||
<span class="comment">// taken from https://github.com/rust-embedded/cortex-m/blob/47921b51f8b960344fcfa1255a50a0d19efcde6d/cortex-m/src/asm.rs#L254-L274</span>
|
||||
|
|
@ -101,19 +101,19 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
|
|||
<span class="comment">// Ensure thumb mode is set.</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">rv</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="value_param">rv</span> <span class="keyword">as</span> <span class="builtin_type">u32</span><span class="parenthesis">)</span> <span class="bitwise">|</span> <span class="numeric_literal">1</span><span class="semicolon">;</span>
|
||||
<span class="keyword">let</span> <span class="variable declaration">msp</span> <span class="operator">=</span> <span class="value_param">msp</span> <span class="keyword">as</span> <span class="builtin_type">u32</span><span class="semicolon">;</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
|
||||
<span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">arch</span><span class="operator">::</span><span class="macro default_library library unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis">(</span>
<span class="string_literal macro">"mrs {</span><span class="variable">tmp</span><span class="string_literal macro">}, CONTROL"</span><span class="comma macro">,</span>
<span class="string_literal macro">"bics {</span><span class="variable">tmp</span><span class="string_literal macro">}, {</span><span class="variable">spsel</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="string_literal macro">"msr CONTROL, {</span><span class="variable">tmp</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="string_literal macro">"isb"</span><span class="comma macro">,</span>
<span class="string_literal macro">"msr MSP, {</span><span class="variable">msp</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="string_literal macro">"bx {</span><span class="variable">rv</span><span class="string_literal macro">}"</span><span class="comma macro">,</span>
<span class="comment macro">// `out(reg) _` is not permitted in a `noreturn` asm! call,</span>
<span class="comment macro">// so instead use `in(reg) 0` and don't restore it afterwards.</span>
<span class="comment">// `out(reg) _` is not permitted in a `noreturn` asm! call,</span>
<span class="comment">// so instead use `in(reg) 0` and don't restore it afterwards.</span>
<span class="variable declaration macro">tmp</span> <span class="operator macro">=</span> <span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="numeric_literal macro">0</span><span class="comma macro">,</span>
<span class="variable declaration macro">spsel</span> <span class="operator macro">=</span> <span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="numeric_literal macro">2</span><span class="comma macro">,</span>
<span class="variable declaration macro">msp</span> <span class="operator macro">=</span> <span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">msp</span><span class="comma macro">,</span>
<span class="variable declaration macro">rv</span> <span class="operator macro">=</span> <span class="keyword control macro">in</span><span class="parenthesis macro">(</span><span class="reg library macro">reg</span><span class="parenthesis macro">)</span> <span class="variable macro">rv</span><span class="comma macro">,</span>
<span class="keyword macro">options</span><span class="parenthesis macro">(</span><span class="keyword macro">noreturn</span><span class="comma macro">,</span> <span class="keyword macro">nomem</span><span class="comma macro">,</span> <span class="keyword macro">nostack</span><span class="parenthesis macro">)</span><span class="comma macro">,</span>
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span></code></pre>
@ -45,20 +45,20 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">allow</span><span class="parenthesis attribute">(</span><span class="none attribute">dead_code</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="tool_module attribute">rustfmt</span><span class="operator attribute">::</span><span class="tool_module attribute">skip</span><span class="attribute_bracket attribute">]</span>
<pre><code><span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="builtin_attr attribute macro proc_macro">allow</span><span class="parenthesis attribute macro proc_macro">(</span><span class="none attribute macro proc_macro">dead_code</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="tool_module attribute macro proc_macro">rustfmt</span><span class="operator attribute macro proc_macro">::</span><span class="tool_module attribute macro proc_macro">skip</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">identity</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Default</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="attribute attribute default_library library macro proc_macro">derive</span><span class="parenthesis attribute macro proc_macro">(</span><span class="derive attribute default_library library macro">Default</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="comment documentation">/// This is a doc comment</span>
<span class="comment">// This is a normal comment</span>
<span class="comment documentation">/// This is a doc comment</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="attribute attribute default_library library macro proc_macro">derive</span><span class="parenthesis attribute macro proc_macro">(</span><span class="derive attribute default_library library macro">Copy</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="comment">// This is another normal comment</span>
<span class="comment documentation">/// This is another doc comment</span>
<span class="comment">// This is another normal comment</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="comma attribute">,</span> <span class="unresolved_reference attribute">Unresolved</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="attribute attribute default_library library macro proc_macro">derive</span><span class="parenthesis attribute macro proc_macro">(</span><span class="derive attribute default_library library macro">Copy</span><span class="comma attribute macro proc_macro">,</span> <span class="unresolved_reference attribute macro">Unresolved</span><span class="parenthesis attribute macro proc_macro">)</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="comment">// The reason for these being here is to test AttrIds</span>
<span class="keyword">enum</span> <span class="enum declaration">Foo</span> <span class="brace">{</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="derive_helper attribute default_library library">default</span><span class="attribute_bracket attribute">]</span>
<span class="enum_variant declaration">Bar</span>
<span class="brace">}</span></code></pre>
<span class="keyword macro proc_macro">enum</span> <span class="enum declaration macro proc_macro">Foo</span> <span class="brace macro proc_macro">{</span>
<span class="attribute_bracket attribute macro proc_macro">#</span><span class="attribute_bracket attribute macro proc_macro">[</span><span class="derive_helper attribute default_library library macro proc_macro">default</span><span class="attribute_bracket attribute macro proc_macro">]</span>
<span class="enum_variant declaration macro proc_macro">Bar</span>
<span class="brace macro proc_macro">}</span></code></pre>
@ -53,9 +53,9 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="macro public">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="struct declaration macro public">Foo</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro public">foo</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="struct declaration macro public">Foo</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">module</span> <span class="brace">{</span>
<span class="macro public">foo</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="struct declaration macro public">Bar</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro public">foo</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="struct declaration macro public">Bar</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">func</span><span class="parenthesis">(</span><span class="punctuation">_</span><span class="colon">:</span> <span class="module">y</span><span class="operator">::</span><span class="struct public">Bar</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span>
<span class="keyword">struct</span> <span class="struct declaration">Innerest</span><span class="angle"><</span><span class="keyword">const</span> <span class="const_param const declaration">C</span><span class="colon">:</span> <span class="unresolved_reference">usize</span><span class="angle">></span> <span class="brace">{</span> <span class="field declaration">field</span><span class="colon">:</span> <span class="bracket">[</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span> <span class="brace">{</span><span class="const_param const">C</span><span class="brace">}</span><span class="bracket">]</span> <span class="brace">}</span>
@ -57,7 +57,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword const">const</span> <span class="brace">{</span>
<span class="keyword">const</span> <span class="punctuation">|</span><span class="punctuation">|</span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>
<span class="macro public">id</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span>
<span class="macro public">id</span><span class="macro_bang">!</span><span class="parenthesis">(</span>
<span class="constant const macro">CONST_ITEM</span><span class="semicolon macro">;</span>
<span class="const_param const macro">CONST_PARAM</span><span class="semicolon macro">;</span>
<span class="keyword const macro">const</span> <span class="brace macro">{</span>
@ -65,7 +65,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace macro">}</span><span class="semicolon macro">;</span>
<span class="operator macro">&</span><span class="keyword macro">raw</span> <span class="keyword macro">const</span> <span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon macro">;</span>
<span class="keyword macro">const</span>
<span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="parenthesis">(</span><span class="parenthesis">)</span><span class="operator">.</span><span class="method const consuming trait">assoc_const_method</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">trait</span> <span class="trait declaration">ConstTrait</span> <span class="brace">{</span>
@ -105,7 +105,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected macro">(</span><span class="none injected macro">foo</span><span class="operator injected macro">.</span><span class="none injected macro">bar</span><span class="parenthesis injected macro">(</span><span class="parenthesis injected macro">)</span><span class="parenthesis injected macro">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span>
@ -156,8 +156,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="brace">}</span>
<span class="comment documentation">/// ```</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">></span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr </span><span class="brace injected">}</span><span class="brace injected">}</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected macro">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected macro">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected public">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="operator injected">></span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="none injected"> </span><span class="brace injected">}</span><span class="brace injected">}</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected public">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected macro">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration public">noop</span> <span class="brace">{</span>
<span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">></span> <span class="brace">{</span>
@ -177,7 +177,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">///</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected macro">[</span><span class="numeric_literal injected macro">1</span><span class="comma injected macro">,</span><span class="none injected"> </span><span class="numeric_literal injected macro">2</span><span class="comma injected macro">,</span><span class="none injected"> </span><span class="numeric_literal injected macro">3</span><span class="bracket injected macro">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
@ -78,7 +78,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">use</span> <span class="self_keyword crate_root public">self</span><span class="operator">::</span><span class="struct">FooCopy</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="struct declaration">BarCopy</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="attribute attribute default_library library">derive</span><span class="parenthesis attribute">(</span><span class="derive attribute default_library library macro">Copy</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">struct</span> <span class="struct declaration">FooCopy</span> <span class="brace">{</span>
<span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
<span class="brace">}</span>
@ -151,7 +151,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">let</span> <span class="variable callable declaration">bar</span> <span class="operator">=</span> <span class="struct">Foo</span><span class="operator">::</span><span class="method associated consuming">baz</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="numeric_literal">-</span><span class="numeric_literal">42</span><span class="comma">,</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="operator">-</span><span class="variable">baz</span><span class="operator">.</span><span class="field">0</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">baz</span> <span class="operator">=</span> <span class="operator">-</span><span class="variable">baz</span><span class="operator">.</span><span class="field library">0</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="logical">!</span><span class="bool_literal">true</span><span class="semicolon">;</span>
@ -170,7 +170,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">impl</span><span class="angle"><</span><span class="type_param declaration">T</span><span class="angle">></span> <span class="enum">Option</span><span class="angle"><</span><span class="type_param">T</span><span class="angle">></span> <span class="brace">{</span>
<span class="keyword">fn</span> <span class="method associated consuming declaration">and</span><span class="angle"><</span><span class="type_param declaration">U</span><span class="angle">></span><span class="parenthesis">(</span><span class="self_keyword declaration">self</span><span class="comma">,</span> <span class="value_param declaration">other</span><span class="colon">:</span> <span class="enum">Option</span><span class="angle"><</span><span class="type_param">U</span><span class="angle">></span><span class="parenthesis">)</span> <span class="operator">-></span> <span class="enum">Option</span><span class="angle"><</span><span class="parenthesis">(</span><span class="type_param">T</span><span class="comma">,</span> <span class="type_param">U</span><span class="parenthesis">)</span><span class="angle">></span> <span class="brace">{</span>
<span class="keyword control">match</span> <span class="value_param">other</span> <span class="brace">{</span>
<span class="enum_variant">None</span> <span class="operator">=></span> <span class="unresolved_reference">unimplemented</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma">,</span>
<span class="enum_variant">None</span> <span class="operator">=></span> <span class="unresolved_reference">unimplemented</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="comma">,</span>
<span class="variable declaration">Nope</span> <span class="operator">=></span> <span class="variable">Nope</span><span class="comma">,</span>
<span class="brace">}</span>
<span class="brace">}</span>
@ -184,7 +184,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword async">async</span> <span class="keyword">fn</span> <span class="function async declaration">async_main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">let</span> <span class="variable declaration">f1</span> <span class="operator">=</span> <span class="function async">learn_and_sing</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword">let</span> <span class="variable declaration">f2</span> <span class="operator">=</span> <span class="unresolved_reference">dance</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="unresolved_reference">futures</span><span class="operator">::</span><span class="unresolved_reference">join</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">f1</span><span class="comma macro">,</span> <span class="none macro">f2</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="unresolved_reference">futures</span><span class="operator">::</span><span class="unresolved_reference">join</span><span class="macro_bang">!</span><span class="parenthesis">(</span>f1<span class="comma">,</span> f2<span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="keyword">fn</span> <span class="function declaration">use_foo_items</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
@ -196,7 +196,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="keyword">let</span> <span class="variable declaration">control_flow</span> <span class="operator">=</span> <span class="module crate_root library">foo</span><span class="operator">::</span><span class="function library">identity</span><span class="parenthesis">(</span><span class="module crate_root library">foo</span><span class="operator">::</span><span class="enum library">ControlFlow</span><span class="operator">::</span><span class="enum_variant library">Continue</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="keyword control">if</span> <span class="variable">control_flow</span><span class="operator">.</span><span class="method consuming library">should_die</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="module crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="module crate_root library">foo</span><span class="operator">::</span><span class="unresolved_reference">die</span><span class="macro_bang">!</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="brace">}</span>
@ -51,7 +51,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="function">fixture</span><span class="parenthesis">(</span><span class="string_literal">r#"</span><span class="none injected">
</span><span class="keyword injected">trait</span><span class="none injected"> </span><span class="trait declaration injected">Foo</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected">
</span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function associated declaration injected static trait">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected">
</span><span class="unresolved_reference injected">println</span><span class="macro_bang injected">!</span><span class="parenthesis injected macro">(</span><span class="string_literal injected macro">"2 + 2 = {}"</span><span class="comma injected macro">,</span><span class="none injected"> </span><span class="numeric_literal injected macro">4</span><span class="parenthesis injected macro">)</span><span class="semicolon injected">;</span><span class="none injected">
</span><span class="unresolved_reference injected">println</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="string_literal injected">"2 + 2 = {}"</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">4</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="none injected">
</span><span class="brace injected">}</span><span class="none injected">
</span><span class="brace injected">}</span><span class="string_literal">"#</span>
<span class="parenthesis">)</span><span class="semicolon">;</span>
@ -46,8 +46,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
</style>
<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="macro public">template</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">template</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro public">template</span><span class="macro_bang">!</span><span class="parenthesis">(</span>template<span class="parenthesis">)</span><span class="semicolon">;</span>
<span class="brace">}</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="module attribute crate_root library">proc_macros</span><span class="operator attribute">::</span><span class="attribute attribute library">issue_18089</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">fn</span> <span class="macro declaration public">template</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre>
<span class="keyword macro proc_macro">fn</span> <span class="macro declaration macro proc_macro public">template</span><span class="parenthesis macro proc_macro">(</span><span class="parenthesis macro proc_macro">)</span> <span class="brace macro proc_macro">{</span><span class="brace macro proc_macro">}</span></code></pre>
Some files were not shown because too many files have changed in this diff