From 2c7f2c105bb52d83409fc29bd181af442b8ba43d Mon Sep 17 00:00:00 2001 From: Nika Layzell Date: Mon, 25 Jul 2022 00:43:33 -0400 Subject: [PATCH 1/2] proc_macro/bridge: send diagnostics over the bridge as a struct This removes some RPC when creating and emitting diagnostics, and simplifies the bridge slightly. After this change, there are no remaining methods which take advantage of the support for `&mut` references to objects in the store as arguments, meaning that support for them could technically be removed if we wanted. The only remaining uses of immutable references into the store are `TokenStream` and `SourceFile`. --- .../src/abis/abi_sysroot/ra_server.rs | 59 ++----------------- 1 file changed, 4 insertions(+), 55 deletions(-) diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index 46882845a8..c1f8615aaf 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -37,23 +37,6 @@ pub struct SourceFile { type Level = super::proc_macro::Level; type LineColumn = super::proc_macro::LineColumn; -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. - pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } -} - pub struct FreeFunctions; #[derive(Default)] @@ -65,8 +48,6 @@ impl server::Types for RustAnalyzer { type FreeFunctions = FreeFunctions; type TokenStream = TokenStream; type SourceFile = SourceFile; - type MultiSpan = Vec; - type Diagnostic = Diagnostic; type Span = Span; type Symbol = Symbol; } @@ -90,6 +71,10 @@ impl server::FreeFunctions for RustAnalyzer { span: tt::TokenId::unspecified(), }) } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic) { + // FIXME handle diagnostic + } } impl server::TokenStream for RustAnalyzer { @@ -282,30 +267,6 @@ impl server::SourceFile for RustAnalyzer { } } -impl server::Diagnostic for RustAnalyzer { - fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { - let mut diag = Diagnostic::new(level, msg); - diag.spans = spans; - diag - } - - fn sub( - &mut self, - _diag: &mut Self::Diagnostic, - _level: Level, - _msg: &str, - _spans: Self::MultiSpan, - ) { - // FIXME handle diagnostic - // - } - - fn emit(&mut self, _diag: Self::Diagnostic) { - // FIXME handle diagnostic - // diag.emit() - } -} - impl server::Span for RustAnalyzer { fn debug(&mut self, span: Self::Span) -> String { format!("{:?}", span.0) @@ -372,18 +333,6 @@ impl server::Span for RustAnalyzer { } } -impl server::MultiSpan for RustAnalyzer { - fn new(&mut self) -> Self::MultiSpan { - // FIXME handle span - vec![] - } - - fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) { - //TODP - other.push(span) - } -} - impl server::Symbol for RustAnalyzer { fn normalize_and_validate_ident(&mut self, string: &str) -> Result { // FIXME: nfc-normalize and validate idents From 65e1dc4d9c4d172488227ae0105196ff2e4ae6ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 6 Sep 2022 21:20:49 +0300 Subject: [PATCH 2/2] :arrow_up: rust-analyzer --- .github/workflows/ci.yaml | 132 +++--- .github/workflows/release.yaml | 4 +- crates/hir-def/src/body.rs | 
19 +- crates/hir-def/src/body/lower.rs | 135 +++--- crates/hir-def/src/body/pretty.rs | 13 - crates/hir-def/src/body/scope.rs | 34 +- crates/hir-def/src/expr.rs | 6 +- crates/hir-def/src/nameres.rs | 15 +- crates/hir-def/src/nameres/collector.rs | 24 +- crates/hir-def/src/nameres/mod_resolution.rs | 3 +- .../src/nameres/tests/mod_resolution.rs | 16 + crates/hir-def/src/resolver.rs | 253 ++++++----- crates/hir-expand/src/lib.rs | 2 +- crates/hir-expand/src/name.rs | 1 + crates/hir-ty/src/autoderef.rs | 3 +- crates/hir-ty/src/diagnostics/expr.rs | 7 +- .../match_check/deconstruct_pat.rs | 16 +- .../src/diagnostics/match_check/usefulness.rs | 29 +- crates/hir-ty/src/infer.rs | 33 +- crates/hir-ty/src/infer/expr.rs | 187 ++++---- crates/hir-ty/src/inhabitedness.rs | 173 ++++++++ crates/hir-ty/src/lib.rs | 1 + crates/hir-ty/src/lower.rs | 57 ++- crates/hir-ty/src/tests/macros.rs | 14 +- crates/hir-ty/src/tests/regression.rs | 2 - crates/hir-ty/src/tests/simple.rs | 15 +- crates/hir-ty/src/tests/traits.rs | 92 ++++ crates/hir/src/diagnostics.rs | 1 + crates/hir/src/lib.rs | 6 +- crates/hir/src/source_analyzer.rs | 16 +- ...ert_two_arm_bool_match_to_matches_macro.rs | 294 +++++++++++++ .../extract_struct_from_enum_variant.rs | 115 +++-- .../src/handlers/replace_or_with_or_else.rs | 364 +++++++++++++++ .../replace_turbofish_with_explicit_type.rs | 160 ++++++- .../src/handlers/unmerge_match_arm.rs | 293 +++++++++++++ crates/ide-assists/src/lib.rs | 7 + crates/ide-assists/src/tests/generated.rs | 86 ++++ crates/ide-completion/src/completions/expr.rs | 24 +- crates/ide-completion/src/tests/expression.rs | 38 +- crates/ide-db/src/syntax_helpers/node_ext.rs | 1 - .../src/handlers/break_outside_of_loop.rs | 120 ++++- .../src/handlers/missing_match_arms.rs | 44 ++ crates/ide/src/inlay_hints.rs | 2 +- crates/parser/src/grammar/paths.rs | 5 + crates/parser/src/syntax_kind/generated.rs | 130 +++++- .../ok/0202_typepathfn_with_coloncolon.rast | 43 ++ .../ok/0202_typepathfn_with_coloncolon.rs | 1 + crates/rust-analyzer/src/global_state.rs | 2 + crates/rust-analyzer/src/handlers.rs | 12 +- crates/rust-analyzer/src/reload.rs | 4 +- crates/syntax/rust.ungram | 1 - crates/syntax/src/ast/generated/nodes.rs | 415 ++++++++++-------- crates/syntax/src/ast/make.rs | 263 +++++------ crates/syntax/src/ast/token_ext.rs | 21 +- crates/syntax/src/tests/sourcegen_ast.rs | 27 +- lib/la-arena/src/lib.rs | 37 ++ 56 files changed, 2922 insertions(+), 896 deletions(-) create mode 100644 crates/hir-ty/src/inhabitedness.rs create mode 100644 crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs create mode 100644 crates/ide-assists/src/handlers/replace_or_with_or_else.rs create mode 100644 crates/ide-assists/src/handlers/unmerge_match_arm.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a70252fa65..1563ee0b14 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -6,15 +6,15 @@ on: pull_request: push: branches: - - auto - - try + - auto + - try env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 CI: 1 RUST_BACKTRACE: short - RUSTFLAGS: "-D warnings -W unreachable-pub -W rust-2021-compatibility" + RUSTFLAGS: "-D warnings -W unreachable-pub -W bare-trait-objects" RUSTUP_MAX_RETRIES: 10 jobs: @@ -31,25 +31,25 @@ jobs: os: [ubuntu-latest, windows-latest, macos-latest] steps: - - 
name: Checkout repository - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - fetch-depth: 20 + - name: Checkout repository + uses: actions/checkout@v3 + with: + ref: ${{ github.event.pull_request.head.sha }} + fetch-depth: 20 - - name: Install Rust toolchain - run: | - rustup update --no-self-update stable - rustup component add rustfmt rust-src + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup component add rustfmt rust-src - - name: Cache Dependencies - uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 + - name: Cache Dependencies + uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 - - name: Compile - run: cargo test --no-run --locked + - name: Compile + run: cargo test --no-run --locked - - name: Test - run: cargo test -- --nocapture --quiet + - name: Test + run: cargo test -- --nocapture --quiet # Weird targets to catch non-portable code rust-cross: @@ -64,25 +64,25 @@ jobs: targets_ide: "wasm32-unknown-unknown" steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v3 - - name: Install Rust toolchain - run: | - rustup update --no-self-update stable - rustup target add ${{ env.targets }} ${{ env.targets_ide }} + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup target add ${{ env.targets }} ${{ env.targets_ide }} - - name: Cache Dependencies - uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 + - name: Cache Dependencies + uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 - - name: Check - run: | - for target in ${{ env.targets }}; do - cargo check --target=$target --all-targets - done - for target in ${{ env.targets_ide }}; do - cargo check -p ide --target=$target --all-targets - done + - name: Check + run: | + for target in ${{ env.targets }}; do + cargo check --target=$target --all-targets + done + for target in ${{ env.targets_ide }}; do + cargo check -p ide --target=$target --all-targets + done typescript: if: github.repository == 'rust-lang/rust-analyzer' @@ -95,47 +95,47 @@ jobs: runs-on: ${{ matrix.os }} steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v3 - - name: Install Nodejs - uses: actions/setup-node@v1 - with: - node-version: 16.x + - name: Install Nodejs + uses: actions/setup-node@v1 + with: + node-version: 16.x - - name: Install xvfb - if: matrix.os == 'ubuntu-latest' - run: sudo apt-get install -y xvfb + - name: Install xvfb + if: matrix.os == 'ubuntu-latest' + run: sudo apt-get install -y xvfb - - run: npm ci - working-directory: ./editors/code + - run: npm ci + working-directory: ./editors/code -# - run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; } -# if: runner.os == 'Linux' -# working-directory: ./editors/code + # - run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; } + # if: runner.os == 'Linux' + # working-directory: ./editors/code - - run: npm run lint - working-directory: ./editors/code + - run: npm run lint + working-directory: ./editors/code - - name: Run VS Code tests (Linux) - if: matrix.os == 'ubuntu-latest' - env: - VSCODE_CLI: 1 - run: xvfb-run npm test - working-directory: ./editors/code + - name: Run VS Code tests (Linux) + if: matrix.os == 'ubuntu-latest' + env: + VSCODE_CLI: 1 + run: xvfb-run npm test + working-directory: ./editors/code - - name: Run VS Code tests (Windows) - if: matrix.os 
== 'windows-latest' - env: - VSCODE_CLI: 1 - run: npm test - working-directory: ./editors/code + - name: Run VS Code tests (Windows) + if: matrix.os == 'windows-latest' + env: + VSCODE_CLI: 1 + run: npm test + working-directory: ./editors/code - - run: npm run pretest - working-directory: ./editors/code + - run: npm run pretest + working-directory: ./editors/code - - run: npm run package --scripts-prepend-node-path - working-directory: ./editors/code + - run: npm run package --scripts-prepend-node-path + working-directory: ./editors/code end-success: name: bors build finished diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b2e5db6f68..303a10615b 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -248,7 +248,7 @@ jobs: if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') working-directory: ./editors/code # token from https://dev.azure.com/rust-analyzer/ - run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix + run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true - name: Publish Extension (Code Marketplace, nightly) if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') @@ -258,4 +258,4 @@ jobs: - name: Publish Extension (OpenVSX, nightly) if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') working-directory: ./editors/code - run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix + run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 1d818d9626..22f5fb9926 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -250,6 +250,10 @@ pub type PatSource = InFile; pub type LabelPtr = AstPtr; pub type LabelSource = InFile; + +pub type FieldPtr = AstPtr; +pub type FieldSource = InFile; + /// An item body together with the mapping from syntax nodes to HIR expression /// IDs. This is needed to go from e.g. a position in a file to the HIR /// expression containing it; but for type inference etc., we want to operate on @@ -264,18 +268,18 @@ pub type LabelSource = InFile; #[derive(Default, Debug, Eq, PartialEq)] pub struct BodySourceMap { expr_map: FxHashMap, - expr_map_back: ArenaMap>, + expr_map_back: ArenaMap, pat_map: FxHashMap, - pat_map_back: ArenaMap>, + pat_map_back: ArenaMap, label_map: FxHashMap, label_map_back: ArenaMap, /// We don't create explicit nodes for record fields (`S { record_field: 92 }`). /// Instead, we use id of expression (`92`) to identify the field. - field_map: FxHashMap>, ExprId>, - field_map_back: FxHashMap>>, + field_map: FxHashMap, + field_map_back: FxHashMap, expansions: FxHashMap>, HirFileId>, @@ -420,7 +424,7 @@ impl Index for Body { // Perhaps `expr_syntax` and `expr_id`? 
impl BodySourceMap { pub fn expr_syntax(&self, expr: ExprId) -> Result { - self.expr_map_back[expr].clone() + self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax) } pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option { @@ -434,7 +438,7 @@ impl BodySourceMap { } pub fn pat_syntax(&self, pat: PatId) -> Result { - self.pat_map_back[pat].clone() + self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax) } pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option { @@ -456,9 +460,10 @@ impl BodySourceMap { self.label_map.get(&src).cloned() } - pub fn field_syntax(&self, expr: ExprId) -> InFile> { + pub fn field_syntax(&self, expr: ExprId) -> FieldSource { self.field_map_back[&expr].clone() } + pub fn node_field(&self, node: InFile<&ast::RecordExprField>) -> Option { let src = node.map(AstPtr::new); self.field_map.get(&src).cloned() diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index f6ec8bf7e9..3b3297f781 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -24,7 +24,7 @@ use syntax::{ use crate::{ adt::StructKind, - body::{Body, BodySourceMap, Expander, LabelSource, PatPtr, SyntheticSyntax}, + body::{Body, BodySourceMap, Expander, ExprPtr, LabelPtr, LabelSource, PatPtr}, body::{BodyDiagnostic, ExprSource, PatSource}, builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, db::DefDatabase, @@ -150,21 +150,21 @@ impl ExprCollector<'_> { LowerCtx::new(self.db, self.expander.current_file_id) } - fn alloc_expr(&mut self, expr: Expr, ptr: AstPtr) -> ExprId { + fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { let src = self.expander.to_source(ptr); - let id = self.make_expr(expr, Ok(src.clone())); + let id = self.make_expr(expr, src.clone()); self.source_map.expr_map.insert(src, id); id } // desugared exprs don't have ptr, that's wrong and should be fixed // somehow. 
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { - self.make_expr(expr, Err(SyntheticSyntax)) + self.body.exprs.alloc(expr) } fn missing_expr(&mut self) -> ExprId { self.alloc_expr_desugared(Expr::Missing) } - fn make_expr(&mut self, expr: Expr, src: Result) -> ExprId { + fn make_expr(&mut self, expr: Expr, src: ExprSource) -> ExprId { let id = self.body.exprs.alloc(expr); self.source_map.expr_map_back.insert(id, src); id @@ -172,20 +172,20 @@ impl ExprCollector<'_> { fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { let src = self.expander.to_source(ptr); - let id = self.make_pat(pat, Ok(src.clone())); + let id = self.make_pat(pat, src.clone()); self.source_map.pat_map.insert(src, id); id } fn missing_pat(&mut self) -> PatId { - self.make_pat(Pat::Missing, Err(SyntheticSyntax)) + self.body.pats.alloc(Pat::Missing) } - fn make_pat(&mut self, pat: Pat, src: Result) -> PatId { + fn make_pat(&mut self, pat: Pat, src: PatSource) -> PatId { let id = self.body.pats.alloc(pat); self.source_map.pat_map_back.insert(id, src); id } - fn alloc_label(&mut self, label: Label, ptr: AstPtr) -> LabelId { + fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { let src = self.expander.to_source(ptr); let id = self.make_label(label, src.clone()); self.source_map.label_map.insert(src, id); @@ -550,20 +550,6 @@ impl ExprCollector<'_> { None => self.alloc_expr(Expr::Missing, syntax_ptr), } } - ast::Expr::MacroStmts(e) => { - let statements: Box<[_]> = - e.statements().filter_map(|s| self.collect_stmt(s)).collect(); - let tail = e.expr().map(|e| self.collect_expr(e)); - - if e.syntax().children().next().is_none() { - // HACK: make sure that macros that expand to nothing aren't treated as a `()` - // expression when used in block tail position. 
- cov_mark::hit!(empty_macro_in_trailing_position_is_removed); - return None; - } - - self.alloc_expr(Expr::MacroStmts { tail, statements }, syntax_ptr) - } ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr), }) } @@ -640,11 +626,46 @@ impl ExprCollector<'_> { } } - fn collect_stmt(&mut self, s: ast::Stmt) -> Option { + fn collect_macro_as_stmt( + &mut self, + statements: &mut Vec, + mac: ast::MacroExpr, + ) -> Option { + let mac_call = mac.macro_call()?; + let syntax_ptr = AstPtr::new(&ast::Expr::from(mac)); + let macro_ptr = AstPtr::new(&mac_call); + let expansion = self.collect_macro_call( + mac_call, + macro_ptr, + false, + |this, expansion: Option| match expansion { + Some(expansion) => { + expansion.statements().for_each(|stmt| this.collect_stmt(statements, stmt)); + expansion.expr().and_then(|expr| match expr { + ast::Expr::MacroExpr(mac) => this.collect_macro_as_stmt(statements, mac), + expr => Some(this.collect_expr(expr)), + }) + } + None => None, + }, + ); + match expansion { + Some(tail) => { + // Make the macro-call point to its expanded expression so we can query + // semantics on syntax pointers to the macro + let src = self.expander.to_source(syntax_ptr); + self.source_map.expr_map.insert(src, tail); + Some(tail) + } + None => None, + } + } + + fn collect_stmt(&mut self, statements: &mut Vec, s: ast::Stmt) { match s { ast::Stmt::LetStmt(stmt) => { if self.check_cfg(&stmt).is_none() { - return None; + return; } let pat = self.collect_pat_opt(stmt.pat()); let type_ref = @@ -654,61 +675,26 @@ impl ExprCollector<'_> { .let_else() .and_then(|let_else| let_else.block_expr()) .map(|block| self.collect_block(block)); - Some(Statement::Let { pat, type_ref, initializer, else_branch }) + statements.push(Statement::Let { pat, type_ref, initializer, else_branch }); } ast::Stmt::ExprStmt(stmt) => { let expr = stmt.expr(); - if let Some(expr) = &expr { - if self.check_cfg(expr).is_none() { - return None; - } + match &expr { + Some(expr) if self.check_cfg(expr).is_none() => return, + _ => (), } let has_semi = stmt.semicolon_token().is_some(); // Note that macro could be expanded to multiple statements - if let Some(expr @ ast::Expr::MacroExpr(mac)) = &expr { - let mac_call = mac.macro_call()?; - let syntax_ptr = AstPtr::new(expr); - let macro_ptr = AstPtr::new(&mac_call); - let stmt = self.collect_macro_call( - mac_call, - macro_ptr, - false, - |this, expansion: Option| match expansion { - Some(expansion) => { - let statements = expansion - .statements() - .filter_map(|stmt| this.collect_stmt(stmt)) - .collect(); - let tail = expansion.expr().map(|expr| this.collect_expr(expr)); - - let mac_stmts = this.alloc_expr( - Expr::MacroStmts { tail, statements }, - AstPtr::new(&ast::Expr::MacroStmts(expansion)), - ); - - Some(mac_stmts) - } - None => None, - }, - ); - - let expr = match stmt { - Some(expr) => { - // Make the macro-call point to its expanded expression so we can query - // semantics on syntax pointers to the macro - let src = self.expander.to_source(syntax_ptr); - self.source_map.expr_map.insert(src, expr); - expr - } - None => self.alloc_expr(Expr::Missing, syntax_ptr), - }; - Some(Statement::Expr { expr, has_semi }) + if let Some(ast::Expr::MacroExpr(mac)) = expr { + if let Some(expr) = self.collect_macro_as_stmt(statements, mac) { + statements.push(Statement::Expr { expr, has_semi }) + } } else { let expr = self.collect_expr_opt(expr); - Some(Statement::Expr { expr, has_semi }) + statements.push(Statement::Expr { expr, has_semi }); } } - 
ast::Stmt::Item(_item) => None, + ast::Stmt::Item(_item) => (), } } @@ -729,9 +715,12 @@ impl ExprCollector<'_> { let prev_def_map = mem::replace(&mut self.expander.def_map, def_map); let prev_local_module = mem::replace(&mut self.expander.module, module); - let mut statements: Vec<_> = - block.statements().filter_map(|s| self.collect_stmt(s)).collect(); - let tail = block.tail_expr().and_then(|e| self.maybe_collect_expr(e)); + let mut statements = Vec::new(); + block.statements().for_each(|s| self.collect_stmt(&mut statements, s)); + let tail = block.tail_expr().and_then(|e| match e { + ast::Expr::MacroExpr(mac) => self.collect_macro_as_stmt(&mut statements, mac), + expr => self.maybe_collect_expr(expr), + }); let tail = tail.or_else(|| { let stmt = statements.pop()?; if let Statement::Expr { expr, has_semi: false } = stmt { diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index ddd476efe5..f2fed95444 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -422,19 +422,6 @@ impl<'a> Printer<'a> { } w!(self, "}}"); } - Expr::MacroStmts { statements, tail } => { - w!(self, "{{ // macro statements"); - self.indented(|p| { - for stmt in statements.iter() { - p.print_stmt(stmt); - } - if let Some(tail) = tail { - p.print_expr(*tail); - } - }); - self.newline(); - w!(self, "}}"); - } } } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index f4c390dce2..45f64ebb06 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -47,16 +47,9 @@ pub struct ScopeData { impl ExprScopes { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { let body = db.body(def); - Arc::new(ExprScopes::new(&*body)) - } - - fn new(body: &Body) -> ExprScopes { - let mut scopes = - ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; - let mut root = scopes.root_scope(); - scopes.add_params_bindings(body, root, &body.params); - compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root); - scopes + let mut scopes = ExprScopes::new(&*body); + scopes.shrink_to_fit(); + Arc::new(scopes) } pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { @@ -89,6 +82,17 @@ impl ExprScopes { pub fn scope_by_expr(&self) -> &FxHashMap { &self.scope_by_expr } +} + +impl ExprScopes { + fn new(body: &Body) -> ExprScopes { + let mut scopes = + ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; + let mut root = scopes.root_scope(); + scopes.add_params_bindings(body, root, &body.params); + compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root); + scopes + } fn root_scope(&mut self) -> ScopeId { self.scopes.alloc(ScopeData { parent: None, block: None, label: None, entries: vec![] }) @@ -138,6 +142,13 @@ impl ExprScopes { fn set_scope(&mut self, node: ExprId, scope: ScopeId) { self.scope_by_expr.insert(node, scope); } + + fn shrink_to_fit(&mut self) { + let ExprScopes { scopes, scope_by_expr } = self; + scopes.shrink_to_fit(); + scopes.values_mut().for_each(|it| it.entries.shrink_to_fit()); + scope_by_expr.shrink_to_fit(); + } } fn compute_block_scopes( @@ -176,9 +187,6 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope scopes.set_scope(expr, *scope); match &body[expr] { - Expr::MacroStmts { statements, tail } => { - compute_block_scopes(statements, *tail, body, scopes, scope); - } Expr::Block { statements, tail, id, label } => { let mut scope = scopes.new_block_scope(*scope, *id, 
make_label(label)); // Overwrite the old scope for the block expr, so that every block scope can be found diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 4381b43c25..419d3feec3 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -206,10 +206,6 @@ pub enum Expr { Unsafe { body: ExprId, }, - MacroStmts { - statements: Box<[Statement]>, - tail: Option, - }, Array(Array), Literal(Literal), Underscore, @@ -263,7 +259,7 @@ impl Expr { Expr::Let { expr, .. } => { f(*expr); } - Expr::MacroStmts { tail, statements } | Expr::Block { statements, tail, .. } => { + Expr::Block { statements, tail, .. } => { for stmt in statements.iter() { match stmt { Statement::Let { initializer, .. } => { diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 6eb530ecc5..9b4ce9f97c 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -64,7 +64,7 @@ use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId}; use itertools::Itertools; use la_arena::Arena; use profile::Count; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; use stdx::format_to; use syntax::{ast, SmolStr}; @@ -98,7 +98,11 @@ pub struct DefMap { /// The prelude module for this crate. This either comes from an import /// marked with the `prelude_import` attribute, or (in the normal case) from /// a dependency (`std` or `core`). + /// The prelude is empty for non-block DefMaps (unless `#[prelude_import]` was used, + /// but that attribute is nightly and when used in a block, it affects resolution globally + /// so we aren't handling this correctly anyways). prelude: Option, + /// The extern prelude is only populated for non-block DefMaps extern_prelude: FxHashMap, /// Side table for resolving derive helpers. @@ -114,6 +118,8 @@ pub struct DefMap { registered_attrs: Vec, /// Custom tool modules registered with `#![register_tool]`. registered_tools: Vec, + /// Unstable features of Rust enabled with `#![feature(A, B)]`. 
+ unstable_features: FxHashSet, edition: Edition, recursion_limit: Option, @@ -284,6 +290,7 @@ impl DefMap { modules, registered_attrs: Vec::new(), registered_tools: Vec::new(), + unstable_features: FxHashSet::default(), diagnostics: Vec::new(), } } @@ -314,6 +321,10 @@ impl DefMap { &self.registered_attrs } + pub fn is_unstable_feature_enabled(&self, feature: &str) -> bool { + self.unstable_features.contains(feature) + } + pub fn root(&self) -> LocalModuleId { self.root } @@ -479,6 +490,7 @@ impl DefMap { registered_tools, fn_proc_macro_mapping, derive_helpers_in_scope, + unstable_features, proc_macro_loading_error: _, block: _, edition: _, @@ -496,6 +508,7 @@ impl DefMap { registered_tools.shrink_to_fit(); fn_proc_macro_mapping.shrink_to_fit(); derive_helpers_in_scope.shrink_to_fit(); + unstable_features.shrink_to_fit(); for (_, module) in modules.iter_mut() { module.children.shrink_to_fit(); module.scope.shrink_to_fit(); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 8a6bb929c3..495bbe4579 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -294,6 +294,17 @@ impl DefCollector<'_> { continue; } + if *attr_name == hir_expand::name![feature] { + let features = + attr.parse_path_comma_token_tree().into_iter().flatten().filter_map( + |feat| match feat.segments() { + [name] => Some(name.to_smol_str()), + _ => None, + }, + ); + self.def_map.unstable_features.extend(features); + } + let attr_is_register_like = *attr_name == hir_expand::name![register_attr] || *attr_name == hir_expand::name![register_tool]; if !attr_is_register_like { @@ -501,10 +512,9 @@ impl DefCollector<'_> { Edition::Edition2021 => name![rust_2021], }; - let path_kind = if self.def_map.edition == Edition::Edition2015 { - PathKind::Plain - } else { - PathKind::Abs + let path_kind = match self.def_map.edition { + Edition::Edition2015 => PathKind::Plain, + _ => PathKind::Abs, }; let path = ModPath::from_segments(path_kind, [krate.clone(), name![prelude], edition].into_iter()); @@ -524,7 +534,6 @@ impl DefCollector<'_> { match per_ns.types { Some((ModuleDefId::ModuleId(m), _)) => { self.def_map.prelude = Some(m); - return; } types => { tracing::debug!( @@ -839,7 +848,10 @@ impl DefCollector<'_> { tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 - if import.is_extern_crate && module_id == self.def_map.root { + if import.is_extern_crate + && self.def_map.block.is_none() + && module_id == self.def_map.root + { if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name) { self.def_map.extern_prelude.insert(name.clone(), def); diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 99f7f1b549..ca7bcc814e 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -65,6 +65,7 @@ impl ModDir { name: &Name, attr_path: Option<&SmolStr>, ) -> Result<(FileId, bool, ModDir), Box<[String]>> { + let name = name.unescaped(); let orig_file_id = file_id.original_file(db.upcast()); let mut candidate_files = ArrayVec::<_, 2>::new(); @@ -73,12 +74,10 @@ impl ModDir { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } None if file_id.is_include_macro(db.upcast()) => { - let name = name.unescaped(); candidate_files.push(format!("{}.rs", name)); 
candidate_files.push(format!("{}/mod.rs", name)); } None => { - let name = name.unescaped(); candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); } diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index 3fa585574d..ba3bf8b5a5 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -127,7 +127,15 @@ mod r#async; use self::r#async::Bar; //- /async.rs +mod foo; +mod r#async; pub struct Bar; + +//- /async/foo.rs +pub struct Foo; + +//- /async/async.rs +pub struct Baz; "#, expect![[r#" crate @@ -136,6 +144,14 @@ pub struct Bar; crate::r#async Bar: t v + foo: t + r#async: t + + crate::r#async::foo + Foo: t v + + crate::r#async::r#async + Baz: t v "#]], ); } diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 3163fa0f93..8aa5973cac 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -31,12 +31,10 @@ pub struct Resolver { /// /// When using, you generally want to process the scopes in reverse order, /// there's `scopes` *method* for that. - /// - /// Invariant: There exists at least one Scope::ModuleScope at the start of the vec. scopes: Vec, + module_scope: ModuleItemMap, } -// FIXME how to store these best #[derive(Debug, Clone)] struct ModuleItemMap { def_map: Arc, @@ -53,7 +51,7 @@ struct ExprScope { #[derive(Debug, Clone)] enum Scope { /// All the items and imported names of a module - ModuleScope(ModuleItemMap), + BlockScope(ModuleItemMap), /// Brings the generic parameters of an item into scope GenericParams { def: GenericDefId, params: Interned }, /// Brings `Self` in `impl` block into scope @@ -127,24 +125,6 @@ impl Resolver { } } - fn scopes(&self) -> impl Iterator { - self.scopes.iter().rev() - } - - fn resolve_module_path( - &self, - db: &dyn DefDatabase, - path: &ModPath, - shadow: BuiltinShadowMode, - ) -> PerNs { - let (item_map, module) = self.module_scope(); - let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow); - if segment_index.is_some() { - return PerNs::none(); - } - module_res - } - pub fn resolve_module_path_in_items(&self, db: &dyn DefDatabase, path: &ModPath) -> PerNs { self.resolve_module_path(db, path, BuiltinShadowMode::Module) } @@ -155,7 +135,7 @@ impl Resolver { db: &dyn DefDatabase, path: &ModPath, ) -> Option { - let (item_map, module) = self.module_scope(); + let (item_map, module) = self.item_scope(); let (module_res, idx) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module); match module_res.take_types()? { ModuleDefId::TraitId(it) => { @@ -183,37 +163,38 @@ impl Resolver { ) -> Option<(TypeNs, Option)> { let first_name = path.segments().first()?; let skip_to_mod = path.kind != PathKind::Plain; + if skip_to_mod { + return self.module_scope.resolve_path_in_type_ns(db, path); + } + + let remaining_idx = || if path.segments().len() == 1 { None } else { Some(1) }; + for scope in self.scopes() { match scope { Scope::ExprScope(_) => continue, - Scope::GenericParams { .. 
} | Scope::ImplDefScope(_) if skip_to_mod => continue, - Scope::GenericParams { params, def } => { if let Some(id) = params.find_type_by_name(first_name, *def) { - let idx = if path.segments().len() == 1 { None } else { Some(1) }; - return Some((TypeNs::GenericParam(id), idx)); + return Some((TypeNs::GenericParam(id), remaining_idx())); } } - Scope::ImplDefScope(impl_) => { + &Scope::ImplDefScope(impl_) => { if first_name == &name![Self] { - let idx = if path.segments().len() == 1 { None } else { Some(1) }; - return Some((TypeNs::SelfType(*impl_), idx)); + return Some((TypeNs::SelfType(impl_), remaining_idx())); } } - Scope::AdtScope(adt) => { + &Scope::AdtScope(adt) => { if first_name == &name![Self] { - let idx = if path.segments().len() == 1 { None } else { Some(1) }; - return Some((TypeNs::AdtSelfType(*adt), idx)); + return Some((TypeNs::AdtSelfType(adt), remaining_idx())); } } - Scope::ModuleScope(m) => { + Scope::BlockScope(m) => { if let Some(res) = m.resolve_path_in_type_ns(db, path) { return Some(res); } } } } - None + self.module_scope.resolve_path_in_type_ns(db, path) } pub fn resolve_path_in_type_ns_fully( @@ -235,7 +216,7 @@ impl Resolver { ) -> Option { match visibility { RawVisibility::Module(_) => { - let (item_map, module) = self.module_scope(); + let (item_map, module) = self.item_scope(); item_map.resolve_visibility(db, module, visibility) } RawVisibility::Public => Some(Visibility::Public), @@ -251,18 +232,14 @@ impl Resolver { let tmp = name![self]; let first_name = if path.is_self() { &tmp } else { path.segments().first()? }; let skip_to_mod = path.kind != PathKind::Plain && !path.is_self(); + if skip_to_mod { + return self.module_scope.resolve_path_in_value_ns(db, path); + } + for scope in self.scopes() { match scope { - Scope::AdtScope(_) - | Scope::ExprScope(_) - | Scope::GenericParams { .. } - | Scope::ImplDefScope(_) - if skip_to_mod => - { - continue - } - - Scope::ExprScope(scope) if n_segments <= 1 => { + Scope::ExprScope(_) if n_segments > 1 => continue, + Scope::ExprScope(scope) => { let entry = scope .expr_scopes .entries(scope.scope_id) @@ -273,44 +250,39 @@ impl Resolver { return Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.pat()))); } } - Scope::ExprScope(_) => continue, - Scope::GenericParams { params, def } if n_segments > 1 => { if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); return Some(ResolveValueResult::Partial(ty, 1)); } } - Scope::GenericParams { params, def } if n_segments == 1 => { + Scope::GenericParams { .. } if n_segments != 1 => continue, + Scope::GenericParams { params, def } => { if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); return Some(ResolveValueResult::ValueNs(val)); } } - Scope::GenericParams { .. 
} => continue, - Scope::ImplDefScope(impl_) => { + &Scope::ImplDefScope(impl_) => { if first_name == &name![Self] { - if n_segments > 1 { - let ty = TypeNs::SelfType(*impl_); - return Some(ResolveValueResult::Partial(ty, 1)); + return Some(if n_segments > 1 { + ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1) } else { - return Some(ResolveValueResult::ValueNs(ValueNs::ImplSelf(*impl_))); - } + ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_)) + }); } } + // bare `Self` doesn't work in the value namespace in a struct/enum definition + Scope::AdtScope(_) if n_segments == 1 => continue, Scope::AdtScope(adt) => { - if n_segments == 1 { - // bare `Self` doesn't work in the value namespace in a struct/enum definition - continue; - } if first_name == &name![Self] { let ty = TypeNs::AdtSelfType(*adt); return Some(ResolveValueResult::Partial(ty, 1)); } } - Scope::ModuleScope(m) => { + Scope::BlockScope(m) => { if let Some(def) = m.resolve_path_in_value_ns(db, path) { return Some(def); } @@ -318,15 +290,16 @@ impl Resolver { } } + if let res @ Some(_) = self.module_scope.resolve_path_in_value_ns(db, path) { + return res; + } + // If a path of the shape `u16::from_le_bytes` failed to resolve at all, then we fall back // to resolving to the primitive type, to allow this to still work in the presence of // `use core::u16;`. if path.kind == PathKind::Plain && path.segments().len() > 1 { - match BuiltinType::by_name(&path.segments()[0]) { - Some(builtin) => { - return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1)); - } - None => {} + if let Some(builtin) = BuiltinType::by_name(&path.segments()[0]) { + return Some(ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1)); } } @@ -345,7 +318,7 @@ impl Resolver { } pub fn resolve_path_as_macro(&self, db: &dyn DefDatabase, path: &ModPath) -> Option { - let (item_map, module) = self.module_scope(); + let (item_map, module) = self.item_scope(); item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros() } @@ -395,30 +368,43 @@ impl Resolver { for scope in self.scopes() { scope.process_names(&mut res, db); } + let ModuleItemMap { ref def_map, module_id } = self.module_scope; + // FIXME: should we provide `self` here? 
+ // f( + // Name::self_param(), + // PerNs::types(Resolution::Def { + // def: m.module.into(), + // }), + // ); + def_map[module_id].scope.entries().for_each(|(name, def)| { + res.add_per_ns(name, def); + }); + def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| { + macs.iter().for_each(|&mac| { + res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac)))); + }) + }); + def_map.extern_prelude().for_each(|(name, &def)| { + res.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def))); + }); + BUILTIN_SCOPE.iter().for_each(|(name, &def)| { + res.add_per_ns(name, def); + }); + if let Some(prelude) = def_map.prelude() { + let prelude_def_map = prelude.def_map(db); + for (name, def) in prelude_def_map[prelude.local_id].scope.entries() { + res.add_per_ns(name, def) + } + } res.map } pub fn traits_in_scope(&self, db: &dyn DefDatabase) -> FxHashSet { let mut traits = FxHashSet::default(); + for scope in self.scopes() { match scope { - Scope::ModuleScope(m) => { - if let Some(prelude) = m.def_map.prelude() { - let prelude_def_map = prelude.def_map(db); - traits.extend(prelude_def_map[prelude.local_id].scope.traits()); - } - traits.extend(m.def_map[m.module_id].scope.traits()); - - // Add all traits that are in scope because of the containing DefMaps - m.def_map.with_ancestor_maps(db, m.module_id, &mut |def_map, module| { - if let Some(prelude) = def_map.prelude() { - let prelude_def_map = prelude.def_map(db); - traits.extend(prelude_def_map[prelude.local_id].scope.traits()); - } - traits.extend(def_map[module].scope.traits()); - None::<()> - }); - } + Scope::BlockScope(m) => traits.extend(m.def_map[m.module_id].scope.traits()), &Scope::ImplDefScope(impl_) => { if let Some(target_trait) = &db.impl_data(impl_).target_trait { if let Some(TypeNs::TraitId(trait_)) = @@ -431,35 +417,28 @@ impl Resolver { _ => (), } } + + // Fill in the prelude traits + if let Some(prelude) = self.module_scope.def_map.prelude() { + let prelude_def_map = prelude.def_map(db); + traits.extend(prelude_def_map[prelude.local_id].scope.traits()); + } + // Fill in module visible traits + traits.extend(self.module_scope.def_map[self.module_scope.module_id].scope.traits()); traits } - fn module_scope(&self) -> (&DefMap, LocalModuleId) { - self.scopes() - .find_map(|scope| match scope { - Scope::ModuleScope(m) => Some((&*m.def_map, m.module_id)), - _ => None, - }) - .expect("module scope invariant violated") - } - pub fn module(&self) -> ModuleId { - let (def_map, local_id) = self.module_scope(); + let (def_map, local_id) = self.item_scope(); def_map.module_id(local_id) } pub fn krate(&self) -> CrateId { - self.def_map().krate() + self.module_scope.def_map.krate() } pub fn def_map(&self) -> &DefMap { - self.scopes - .get(0) - .and_then(|scope| match scope { - Scope::ModuleScope(m) => Some(&m.def_map), - _ => None, - }) - .expect("module scope invariant violated") + self.item_scope().0 } pub fn where_predicates_in_scope( @@ -488,6 +467,36 @@ impl Resolver { } } +impl Resolver { + fn scopes(&self) -> impl Iterator { + self.scopes.iter().rev() + } + + fn resolve_module_path( + &self, + db: &dyn DefDatabase, + path: &ModPath, + shadow: BuiltinShadowMode, + ) -> PerNs { + let (item_map, module) = self.item_scope(); + let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow); + if segment_index.is_some() { + return PerNs::none(); + } + module_res + } + + /// The innermost block scope that contains items or the module scope that contains this resolver. 
+ fn item_scope(&self) -> (&DefMap, LocalModuleId) { + self.scopes() + .find_map(|scope| match scope { + Scope::BlockScope(m) => Some((&*m.def_map, m.module_id)), + _ => None, + }) + .unwrap_or((&self.module_scope.def_map, self.module_scope.module_id)) + } +} + #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum ScopeDef { ModuleDef(ModuleDefId), @@ -502,14 +511,7 @@ pub enum ScopeDef { impl Scope { fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) { match self { - Scope::ModuleScope(m) => { - // FIXME: should we provide `self` here? - // f( - // Name::self_param(), - // PerNs::types(Resolution::Def { - // def: m.module.into(), - // }), - // ); + Scope::BlockScope(m) => { m.def_map[m.module_id].scope.entries().for_each(|(name, def)| { acc.add_per_ns(name, def); }); @@ -521,18 +523,6 @@ impl Scope { ); }) }); - m.def_map.extern_prelude().for_each(|(name, &def)| { - acc.add(name, ScopeDef::ModuleDef(ModuleDefId::ModuleId(def))); - }); - BUILTIN_SCOPE.iter().for_each(|(name, &def)| { - acc.add_per_ns(name, def); - }); - if let Some(prelude) = m.def_map.prelude() { - let prelude_def_map = prelude.def_map(db); - for (name, def) in prelude_def_map[prelude.local_id].scope.entries() { - acc.add_per_ns(name, def) - } - } } Scope::GenericParams { params, def: parent } => { let parent = *parent; @@ -596,7 +586,7 @@ pub fn resolver_for_scope( if let Some(block) = scopes.block(scope) { if let Some(def_map) = db.block_def_map(block) { let root = def_map.root(); - r = r.push_module_scope(def_map, root); + r = r.push_block_scope(def_map, root); // FIXME: This adds as many module scopes as there are blocks, but resolving in each // already traverses all parents, so this is O(n²). I think we could only store the // innermost module scope instead? 
@@ -623,8 +613,8 @@ impl Resolver { self.push_scope(Scope::ImplDefScope(impl_def)) } - fn push_module_scope(self, def_map: Arc, module_id: LocalModuleId) -> Resolver { - self.push_scope(Scope::ModuleScope(ModuleItemMap { def_map, module_id })) + fn push_block_scope(self, def_map: Arc, module_id: LocalModuleId) -> Resolver { + self.push_scope(Scope::BlockScope(ModuleItemMap { def_map, module_id })) } fn push_expr_scope( @@ -768,14 +758,19 @@ pub trait HasResolver: Copy { impl HasResolver for ModuleId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { let mut def_map = self.def_map(db); - let mut modules: SmallVec<[_; 2]> = smallvec![(def_map.clone(), self.local_id)]; + let mut modules: SmallVec<[_; 1]> = smallvec![]; + let mut module_id = self.local_id; while let Some(parent) = def_map.parent() { + modules.push((def_map, module_id)); def_map = parent.def_map(db); - modules.push((def_map.clone(), parent.local_id)); + module_id = parent.local_id; } - let mut resolver = Resolver { scopes: Vec::with_capacity(modules.len()) }; + let mut resolver = Resolver { + scopes: Vec::with_capacity(modules.len()), + module_scope: ModuleItemMap { def_map, module_id }, + }; for (def_map, module) in modules.into_iter().rev() { - resolver = resolver.push_module_scope(def_map, module); + resolver = resolver.push_block_scope(def_map, module); } resolver } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index d753d88470..fc128102f2 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -969,7 +969,7 @@ impl ExpandTo { if parent.kind() == MACRO_EXPR && parent .parent() - .map_or(true, |p| matches!(p.kind(), EXPR_STMT | STMT_LIST | MACRO_STMTS)) + .map_or(false, |p| matches!(p.kind(), EXPR_STMT | STMT_LIST | MACRO_STMTS)) { return ExpandTo::Statements; } diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 2b859f7750..4ce21a5796 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -336,6 +336,7 @@ pub mod known { test, test_case, recursion_limit, + feature, // Safe intrinsics abort, add_with_overflow, diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index b6f226dbfd..344036dd81 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -104,8 +104,7 @@ pub(crate) fn deref(table: &mut InferenceTable<'_>, ty: Ty) -> Option { fn builtin_deref(ty: &Ty) -> Option<&Ty> { match ty.kind(Interner) { - TyKind::Ref(.., ty) => Some(ty), - TyKind::Raw(.., ty) => Some(ty), + TyKind::Ref(.., ty) | TyKind::Raw(.., ty) => Some(ty), _ => None, } } diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 642e03edd2..c8df4c796e 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -159,12 +159,7 @@ impl ExprValidator { } let pattern_arena = Arena::new(); - let cx = MatchCheckCtx { - module: self.owner.module(db.upcast()), - body: self.owner, - db, - pattern_arena: &pattern_arena, - }; + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena); let mut m_arms = Vec::with_capacity(arms.len()); let mut has_lowering_errors = false; diff --git a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs index bbbe539c13..47d60fc41e 100644 --- a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs +++ b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs @@ -52,7 +52,10 @@ use 
hir_def::{EnumVariantId, HasModule, LocalFieldId, VariantId}; use smallvec::{smallvec, SmallVec}; use stdx::never; -use crate::{infer::normalize, AdtId, Interner, Scalar, Ty, TyExt, TyKind}; +use crate::{ + infer::normalize, inhabitedness::is_enum_variant_uninhabited_from, AdtId, Interner, Scalar, Ty, + TyExt, TyKind, +}; use super::{ is_box, @@ -557,8 +560,8 @@ impl SplitWildcard { TyKind::Scalar(Scalar::Bool) => smallvec![make_range(0, 1, Scalar::Bool)], // TyKind::Array(..) if ... => unhandled(), TyKind::Array(..) | TyKind::Slice(..) => unhandled(), - &TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), ..) => { - let enum_data = cx.db.enum_data(enum_id); + TyKind::Adt(AdtId(hir_def::AdtId::EnumId(enum_id)), subst) => { + let enum_data = cx.db.enum_data(*enum_id); // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an // additional "unknown" constructor. @@ -591,14 +594,15 @@ impl SplitWildcard { let mut ctors: SmallVec<[_; 1]> = enum_data .variants .iter() - .filter(|&(_, _v)| { + .map(|(local_id, _)| EnumVariantId { parent: *enum_id, local_id }) + .filter(|&variant| { // If `exhaustive_patterns` is enabled, we exclude variants known to be // uninhabited. let is_uninhabited = is_exhaustive_pat_feature - && unimplemented!("after MatchCheckCtx.feature_exhaustive_patterns()"); + && is_enum_variant_uninhabited_from(variant, subst, cx.module, cx.db); !is_uninhabited }) - .map(|(local_id, _)| Variant(EnumVariantId { parent: enum_id, local_id })) + .map(Variant) .collect(); if is_secretly_empty || is_declared_nonexhaustive { diff --git a/crates/hir-ty/src/diagnostics/match_check/usefulness.rs b/crates/hir-ty/src/diagnostics/match_check/usefulness.rs index 1221327b95..c4d709a975 100644 --- a/crates/hir-ty/src/diagnostics/match_check/usefulness.rs +++ b/crates/hir-ty/src/diagnostics/match_check/usefulness.rs @@ -277,7 +277,7 @@ use hir_def::{AdtId, DefWithBodyId, HasModule, ModuleId}; use smallvec::{smallvec, SmallVec}; use typed_arena::Arena; -use crate::{db::HirDatabase, Ty, TyExt}; +use crate::{db::HirDatabase, inhabitedness::is_ty_uninhabited_from, Ty, TyExt}; use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard}; @@ -289,13 +289,27 @@ pub(crate) struct MatchCheckCtx<'a, 'p> { pub(crate) db: &'a dyn HirDatabase, /// Lowered patterns from arms plus generated by the check. pub(crate) pattern_arena: &'p Arena>, + exhaustive_patterns: bool, } impl<'a, 'p> MatchCheckCtx<'a, 'p> { - pub(super) fn is_uninhabited(&self, _ty: &Ty) -> bool { - // FIXME(iDawer) implement exhaustive_patterns feature. More info in: - // Tracking issue for RFC 1872: exhaustive_patterns feature https://github.com/rust-lang/rust/issues/51085 - false + pub(crate) fn new( + module: ModuleId, + body: DefWithBodyId, + db: &'a dyn HirDatabase, + pattern_arena: &'p Arena>, + ) -> Self { + let def_map = db.crate_def_map(module.krate()); + let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); + Self { module, body, db, pattern_arena, exhaustive_patterns } + } + + pub(super) fn is_uninhabited(&self, ty: &Ty) -> bool { + if self.feature_exhaustive_patterns() { + is_ty_uninhabited_from(ty, self.module, self.db) + } else { + false + } } /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. @@ -311,10 +325,9 @@ impl<'a, 'p> MatchCheckCtx<'a, 'p> { } } - // Rust feature described as "Allows exhaustive pattern matching on types that contain uninhabited types." 
+ // Rust's unstable feature described as "Allows exhaustive pattern matching on types that contain uninhabited types." pub(super) fn feature_exhaustive_patterns(&self) -> bool { - // FIXME see MatchCheckCtx::is_uninhabited - false + self.exhaustive_patterns } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 5df48e5fdc..10ffde87ee 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -182,7 +182,7 @@ pub(crate) type InferResult = Result, TypeError>; #[derive(Debug, PartialEq, Eq, Clone)] pub enum InferenceDiagnostic { NoSuchField { expr: ExprId }, - BreakOutsideOfLoop { expr: ExprId }, + BreakOutsideOfLoop { expr: ExprId, is_break: bool }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, } @@ -418,18 +418,45 @@ pub(crate) struct InferenceContext<'a> { #[derive(Clone, Debug)] struct BreakableContext { + /// Whether this context contains at least one break expression. may_break: bool, + /// The coercion target of the context. coerce: CoerceMany, + /// The optional label of the context. label: Option, + kind: BreakableKind, +} + +#[derive(Clone, Debug)] +enum BreakableKind { + Block, + Loop, + /// A border is something like an async block, closure etc. Anything that prevents + /// breaking/continuing through + Border, } fn find_breakable<'c>( ctxs: &'c mut [BreakableContext], label: Option<&name::Name>, ) -> Option<&'c mut BreakableContext> { + let mut ctxs = ctxs + .iter_mut() + .rev() + .take_while(|it| matches!(it.kind, BreakableKind::Block | BreakableKind::Loop)); match label { - Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label), - None => ctxs.last_mut(), + Some(_) => ctxs.find(|ctx| ctx.label.as_ref() == label), + None => ctxs.find(|ctx| matches!(ctx.kind, BreakableKind::Loop)), + } +} + +fn find_continuable<'c>( + ctxs: &'c mut [BreakableContext], + label: Option<&name::Name>, +) -> Option<&'c mut BreakableContext> { + match label { + Some(_) => find_breakable(ctxs, label).filter(|it| matches!(it.kind, BreakableKind::Loop)), + None => find_breakable(ctxs, label), } } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 2a13106390..2d04a864a2 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -10,7 +10,7 @@ use chalk_ir::{ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind, }; use hir_def::{ - expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Statement, UnaryOp}, + expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, LabelId, Literal, Statement, UnaryOp}, generics::TypeOrConstParamData, path::{GenericArg, GenericArgs}, resolver::resolver_for_expr, @@ -23,7 +23,7 @@ use syntax::ast::RangeOp; use crate::{ autoderef::{self, Autoderef}, consteval, - infer::coerce::CoerceMany, + infer::{coerce::CoerceMany, find_continuable, BreakableKind}, lower::{ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, }, @@ -120,32 +120,37 @@ impl<'a> InferenceContext<'a> { let ty = match label { Some(_) => { let break_ty = self.table.new_type_var(); - self.breakables.push(BreakableContext { - may_break: false, - coerce: CoerceMany::new(break_ty.clone()), - label: label.map(|label| self.body[label].name.clone()), - }); - let ty = self.infer_block( - tgt_expr, - statements, - *tail, - &Expectation::has_type(break_ty), + let (breaks, ty) = self.with_breakable_ctx( + BreakableKind::Block, + break_ty.clone(), + *label, + |this| { + this.infer_block( + tgt_expr, + 
statements, + *tail, + &Expectation::has_type(break_ty), + ) + }, ); - let ctxt = self.breakables.pop().expect("breakable stack broken"); - if ctxt.may_break { - ctxt.coerce.complete() - } else { - ty - } + breaks.unwrap_or(ty) } None => self.infer_block(tgt_expr, statements, *tail, expected), }; self.resolver = old_resolver; ty } - Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected), + Expr::Unsafe { body } => self.infer_expr(*body, expected), + Expr::Const { body } => { + self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { + this.infer_expr(*body, expected) + }) + .1 + } Expr::TryBlock { body } => { - let _inner = self.infer_expr(*body, expected); + self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { + let _inner = this.infer_expr(*body, expected); + }); // FIXME should be std::result::Result<{inner}, _> self.err_ty() } @@ -154,7 +159,10 @@ impl<'a> InferenceContext<'a> { let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); + let (_, inner_ty) = + self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { + this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)) + }); self.diverges = prev_diverges; self.return_ty = prev_ret_ty; @@ -166,54 +174,44 @@ impl<'a> InferenceContext<'a> { TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty)) .intern(Interner) } - Expr::Loop { body, label } => { - self.breakables.push(BreakableContext { - may_break: false, - coerce: CoerceMany::new(self.table.new_type_var()), - label: label.map(|label| self.body[label].name.clone()), - }); - self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit())); + &Expr::Loop { body, label } => { + let ty = self.table.new_type_var(); + let (breaks, ()) = + self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| { + this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); + }); - let ctxt = self.breakables.pop().expect("breakable stack broken"); - - if ctxt.may_break { - self.diverges = Diverges::Maybe; - ctxt.coerce.complete() - } else { - TyKind::Never.intern(Interner) + match breaks { + Some(breaks) => { + self.diverges = Diverges::Maybe; + breaks + } + None => TyKind::Never.intern(Interner), } } - Expr::While { condition, body, label } => { - self.breakables.push(BreakableContext { - may_break: false, - coerce: CoerceMany::new(self.err_ty()), - label: label.map(|label| self.body[label].name.clone()), + &Expr::While { condition, body, label } => { + self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| { + this.infer_expr( + condition, + &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), + ); + this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); }); - self.infer_expr( - *condition, - &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), - ); - self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit())); - let _ctxt = self.breakables.pop().expect("breakable stack broken"); + // the body may not run, so it diverging doesn't mean we diverge self.diverges = Diverges::Maybe; TyBuilder::unit() } - Expr::For { iterable, body, pat, label } => { - let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); - - self.breakables.push(BreakableContext { - may_break: false, - coerce: CoerceMany::new(self.err_ty()), - label: label.map(|label| 
self.body[label].name.clone()), - }); + &Expr::For { iterable, body, pat, label } => { + let iterable_ty = self.infer_expr(iterable, &Expectation::none()); let pat_ty = self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item()); - self.infer_pat(*pat, &pat_ty, BindingMode::default()); + self.infer_pat(pat, &pat_ty, BindingMode::default()); + self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| { + this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); + }); - self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit())); - let _ctxt = self.breakables.pop().expect("breakable stack broken"); // the body may not run, so it diverging doesn't mean we diverge self.diverges = Diverges::Maybe; TyBuilder::unit() @@ -269,7 +267,9 @@ impl<'a> InferenceContext<'a> { let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone()); - self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); + self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { + this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty)); + }); self.diverges = prev_diverges; self.return_ty = prev_ret_ty; @@ -372,37 +372,45 @@ impl<'a> InferenceContext<'a> { let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty()) } - Expr::Continue { .. } => TyKind::Never.intern(Interner), - Expr::Break { expr, label } => { - let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) { - Some(ctxt) => { - // avoiding the borrowck - mem::replace( - &mut ctxt.coerce, - CoerceMany::new(self.result.standard_types.unknown.clone()), - ) - } - None => CoerceMany::new(self.result.standard_types.unknown.clone()), + Expr::Continue { label } => { + if let None = find_continuable(&mut self.breakables, label.as_ref()) { + self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { + expr: tgt_expr, + is_break: false, + }); }; - + TyKind::Never.intern(Interner) + } + Expr::Break { expr, label } => { let val_ty = if let Some(expr) = *expr { self.infer_expr(expr, &Expectation::none()) } else { TyBuilder::unit() }; - // FIXME: create a synthetic `()` during lowering so we have something to refer to here? - coerce.coerce(self, *expr, &val_ty); + match find_breakable(&mut self.breakables, label.as_ref()) { + Some(ctxt) => { + // avoiding the borrowck + let mut coerce = mem::replace( + &mut ctxt.coerce, + CoerceMany::new(self.result.standard_types.unknown.clone()), + ); - if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) { - ctxt.coerce = coerce; - ctxt.may_break = true; - } else { - self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { - expr: tgt_expr, - }); - }; + // FIXME: create a synthetic `()` during lowering so we have something to refer to here? 
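+                        // Note: `coerce` takes `&mut self`, so the `CoerceMany` was moved out
+                        // of the breakable context above via `mem::replace`; it is restored
+                        // (and `may_break` set) right after the coercion below.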
+ coerce.coerce(self, *expr, &val_ty); + let ctxt = find_breakable(&mut self.breakables, label.as_ref()) + .expect("breakable stack changed during coercion"); + ctxt.coerce = coerce; + ctxt.may_break = true; + } + None => { + self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop { + expr: tgt_expr, + is_break: true, + }); + } + } TyKind::Never.intern(Interner) } Expr::Return { expr } => { @@ -794,9 +802,6 @@ impl<'a> InferenceContext<'a> { None => self.table.new_float_var(), }, }, - Expr::MacroStmts { tail, statements } => { - self.infer_block(tgt_expr, statements, *tail, expected) - } Expr::Underscore => { // Underscore expressions may only appear in assignee expressions, // which are handled by `infer_assignee_expr()`, so any underscore @@ -1475,4 +1480,20 @@ impl<'a> InferenceContext<'a> { }, }) } + + fn with_breakable_ctx( + &mut self, + kind: BreakableKind, + ty: Ty, + label: Option, + cb: impl FnOnce(&mut Self) -> T, + ) -> (Option, T) { + self.breakables.push({ + let label = label.map(|label| self.body[label].name.clone()); + BreakableContext { kind, may_break: false, coerce: CoerceMany::new(ty), label } + }); + let res = cb(self); + let ctx = self.breakables.pop().expect("breakable stack broken"); + (ctx.may_break.then(|| ctx.coerce.complete()), res) + } } diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs new file mode 100644 index 0000000000..0c547192ac --- /dev/null +++ b/crates/hir-ty/src/inhabitedness.rs @@ -0,0 +1,173 @@ +//! Type inhabitedness logic. +use std::ops::ControlFlow::{self, Break, Continue}; + +use chalk_ir::{ + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, + DebruijnIndex, +}; +use hir_def::{ + adt::VariantData, attr::Attrs, type_ref::ConstScalar, visibility::Visibility, AdtId, + EnumVariantId, HasModule, Lookup, ModuleId, VariantId, +}; + +use crate::{ + db::HirDatabase, Binders, ConcreteConst, Const, ConstValue, Interner, Substitution, Ty, TyKind, +}; + +/// Checks whether a type is visibly uninhabited from a particular module. +pub(crate) fn is_ty_uninhabited_from(ty: &Ty, target_mod: ModuleId, db: &dyn HirDatabase) -> bool { + let mut uninhabited_from = UninhabitedFrom { target_mod, db }; + let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST); + inhabitedness == BREAK_VISIBLY_UNINHABITED +} + +/// Checks whether a variant is visibly uninhabited from a particular module. 
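+/// Broadly: a variant counts as uninhabited when at least one of its reachable
+/// fields (enum-variant fields are always reachable, other fields only when
+/// visible from `target_mod`) has a visibly uninhabited type; non-local
+/// `#[non_exhaustive]` variants are always treated as inhabited.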
+pub(crate) fn is_enum_variant_uninhabited_from( + variant: EnumVariantId, + subst: &Substitution, + target_mod: ModuleId, + db: &dyn HirDatabase, +) -> bool { + let enum_data = db.enum_data(variant.parent); + let vars_attrs = db.variants_attrs(variant.parent); + let is_local = variant.parent.lookup(db.upcast()).container.krate() == target_mod.krate(); + + let mut uninhabited_from = UninhabitedFrom { target_mod, db }; + let inhabitedness = uninhabited_from.visit_variant( + variant.into(), + &enum_data.variants[variant.local_id].variant_data, + subst, + &vars_attrs[variant.local_id], + is_local, + ); + inhabitedness == BREAK_VISIBLY_UNINHABITED +} + +struct UninhabitedFrom<'a> { + target_mod: ModuleId, + db: &'a dyn HirDatabase, +} + +const CONTINUE_OPAQUELY_INHABITED: ControlFlow = Continue(()); +const BREAK_VISIBLY_UNINHABITED: ControlFlow = Break(VisiblyUninhabited); +#[derive(PartialEq, Eq)] +struct VisiblyUninhabited; + +impl TypeVisitor for UninhabitedFrom<'_> { + type BreakTy = VisiblyUninhabited; + + fn as_dyn(&mut self) -> &mut dyn TypeVisitor { + self + } + + fn visit_ty( + &mut self, + ty: &Ty, + outer_binder: DebruijnIndex, + ) -> ControlFlow { + match ty.kind(Interner) { + TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst), + TyKind::Never => BREAK_VISIBLY_UNINHABITED, + TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder), + TyKind::Array(item_ty, len) => match try_usize_const(len) { + Some(0) | None => CONTINUE_OPAQUELY_INHABITED, + Some(1..) => item_ty.super_visit_with(self, outer_binder), + }, + + TyKind::Ref(..) | _ => CONTINUE_OPAQUELY_INHABITED, + } + } + + fn interner(&self) -> Interner { + Interner + } +} + +impl UninhabitedFrom<'_> { + fn visit_adt(&mut self, adt: AdtId, subst: &Substitution) -> ControlFlow { + let attrs = self.db.attrs(adt.into()); + let adt_non_exhaustive = attrs.by_key("non_exhaustive").exists(); + let is_local = adt.module(self.db.upcast()).krate() == self.target_mod.krate(); + if adt_non_exhaustive && !is_local { + return CONTINUE_OPAQUELY_INHABITED; + } + + // An ADT is uninhabited iff all its variants uninhabited. + match adt { + // rustc: For now, `union`s are never considered uninhabited. 
+ AdtId::UnionId(_) => CONTINUE_OPAQUELY_INHABITED, + AdtId::StructId(s) => { + let struct_data = self.db.struct_data(s); + self.visit_variant(s.into(), &struct_data.variant_data, subst, &attrs, is_local) + } + AdtId::EnumId(e) => { + let vars_attrs = self.db.variants_attrs(e); + let enum_data = self.db.enum_data(e); + + for (local_id, enum_var) in enum_data.variants.iter() { + let variant_inhabitedness = self.visit_variant( + EnumVariantId { parent: e, local_id }.into(), + &enum_var.variant_data, + subst, + &vars_attrs[local_id], + is_local, + ); + match variant_inhabitedness { + Break(VisiblyUninhabited) => continue, + Continue(()) => return CONTINUE_OPAQUELY_INHABITED, + } + } + BREAK_VISIBLY_UNINHABITED + } + } + } + + fn visit_variant( + &mut self, + variant: VariantId, + variant_data: &VariantData, + subst: &Substitution, + attrs: &Attrs, + is_local: bool, + ) -> ControlFlow { + let non_exhaustive_field_list = attrs.by_key("non_exhaustive").exists(); + if non_exhaustive_field_list && !is_local { + return CONTINUE_OPAQUELY_INHABITED; + } + + let is_enum = matches!(variant, VariantId::EnumVariantId(..)); + let field_tys = self.db.field_types(variant); + let field_vis = self.db.field_visibilities(variant); + + for (fid, _) in variant_data.fields().iter() { + self.visit_field(field_vis[fid], &field_tys[fid], subst, is_enum)?; + } + CONTINUE_OPAQUELY_INHABITED + } + + fn visit_field( + &mut self, + vis: Visibility, + ty: &Binders, + subst: &Substitution, + is_enum: bool, + ) -> ControlFlow { + if is_enum || vis.is_visible_from(self.db.upcast(), self.target_mod) { + let ty = ty.clone().substitute(Interner, subst); + ty.visit_with(self, DebruijnIndex::INNERMOST) + } else { + CONTINUE_OPAQUELY_INHABITED + } + } +} + +fn try_usize_const(c: &Const) -> Option { + let data = &c.data(Interner); + if data.ty.kind(Interner) != &TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)) { + return None; + } + match data.value { + ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(value) }) => Some(value), + _ => None, + } +} diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 5a5d610e36..a82a331d4b 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -14,6 +14,7 @@ mod chalk_db; mod chalk_ext; pub mod consteval; mod infer; +mod inhabitedness; mod interner; mod lower; mod mapping; diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 3f6d0844e9..4a37a79453 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1,8 +1,8 @@ //! Methods for lowering the HIR to types. There are two main cases here: //! //! - Lowering a type reference like `&usize` or `Option` to a -//! type: The entry point for this is `Ty::from_hir`. -//! - Building the type for an item: This happens through the `type_for_def` query. +//! type: The entry point for this is `TyLoweringContext::lower_ty`. +//! - Building the type for an item: This happens through the `ty` query. //! //! This usually involves resolving names, collecting generic arguments etc. 
use std::{ @@ -47,7 +47,7 @@ use crate::{ consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic}, db::HirDatabase, make_binders, - mapping::ToChalk, + mapping::{from_chalk_trait_id, ToChalk}, static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx, utils::Generics, utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics}, @@ -332,7 +332,10 @@ impl<'a> TyLoweringContext<'a> { TypeRef::Macro(macro_call) => { let (mut expander, recursion_start) = { match RefMut::filter_map(self.expander.borrow_mut(), Option::as_mut) { + // There already is an expander here, this means we are already recursing Ok(expander) => (expander, false), + // No expander was created yet, so we are at the start of the expansion recursion + // and therefore have to create an expander. Err(expander) => ( RefMut::map(expander, |it| { it.insert(Expander::new( @@ -362,9 +365,14 @@ impl<'a> TyLoweringContext<'a> { .exit(self.db.upcast(), mark); Some(ty) } - _ => None, + _ => { + drop(expander); + None + } } }; + + // drop the expander, resetting it to pre-recursion state if recursion_start { *self.expander.borrow_mut() = None; } @@ -974,13 +982,44 @@ impl<'a> TyLoweringContext<'a> { fn lower_dyn_trait(&self, bounds: &[Interned]) -> Ty { let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner); let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - QuantifiedWhereClauses::from_iter( + let bounds = + bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)); + + let mut auto_traits = SmallVec::<[_; 8]>::new(); + let mut regular_traits = SmallVec::<[_; 2]>::new(); + let mut other_bounds = SmallVec::<[_; 8]>::new(); + for bound in bounds { + if let Some(id) = bound.trait_id() { + if ctx.db.trait_data(from_chalk_trait_id(id)).is_auto { + auto_traits.push(bound); + } else { + regular_traits.push(bound); + } + } else { + other_bounds.push(bound); + } + } + + if regular_traits.len() > 1 { + return None; + } + + auto_traits.sort_unstable_by_key(|b| b.trait_id().unwrap()); + auto_traits.dedup(); + + Some(QuantifiedWhereClauses::from_iter( Interner, - bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)), - ) + regular_traits.into_iter().chain(other_bounds).chain(auto_traits), + )) }); - let bounds = crate::make_single_type_binders(bounds); - TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner) + + if let Some(bounds) = bounds { + let bounds = crate::make_single_type_binders(bounds); + TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner) + } else { + // FIXME: report error (additional non-auto traits) + TyKind::Error.intern(Interner) + } } fn lower_impl_trait( diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index a1ab6060e7..b3adafaafd 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -193,8 +193,6 @@ fn expr_macro_def_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize 39..442 '{ ...!(); }': () 73..94 'spam!(...am!())': {unknown} 100..119 'for _ ...!() {}': () @@ -276,8 +274,6 @@ fn expr_macro_rules_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - !0..6 '1isize': isize - !0..6 '1isize': isize 53..456 '{ ...!(); }': () 87..108 'spam!(...am!())': {unknown} 114..133 'for _ ...!() {}': () @@ -312,7 +308,6 @@ fn 
expr_macro_expanded_in_stmts() { } "#, expect![[r#" - !0..8 'leta=();': () !3..4 'a': () !5..7 '()': () 57..84 '{ ...); } }': () @@ -321,7 +316,7 @@ fn expr_macro_expanded_in_stmts() { } #[test] -fn recurisve_macro_expanded_in_stmts() { +fn recursive_macro_expanded_in_stmts() { check_infer( r#" macro_rules! ng { @@ -340,11 +335,6 @@ fn recurisve_macro_expanded_in_stmts() { } "#, expect![[r#" - !0..7 'leta=3;': () - !0..13 'ng!{[leta=3]}': () - !0..13 'ng!{[leta=]3}': () - !0..13 'ng!{[leta]=3}': () - !0..13 'ng!{[let]a=3}': () !3..4 'a': i32 !5..6 '3': i32 196..237 '{ ...= a; }': () @@ -369,8 +359,6 @@ fn recursive_inner_item_macro_rules() { "#, expect![[r#" !0..1 '1': i32 - !0..7 'mac!($)': () - !0..26 'macro_...>{1};}': () 107..143 '{ ...!(); }': () 129..130 'a': i32 "#]], diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index c7895db1af..23e51a9c16 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -573,7 +573,6 @@ fn issue_6811() { } "#, expect![[r#" - !0..16 'let_a=...t_b=1;': () !3..5 '_a': i32 !6..7 '1': i32 !11..13 '_b': i32 @@ -1679,7 +1678,6 @@ fn main() { #[test] fn trailing_empty_macro() { - cov_mark::check!(empty_macro_in_trailing_position_is_removed); check_no_mismatches( r#" macro_rules! m2 { diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 5b08f55210..4ea103e5d9 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -2549,7 +2549,6 @@ impl B for Astruct {} expect![[r#" 569..573 'self': Box<[T], A> 602..634 '{ ... }': Vec - 612..628 'unimpl...ted!()': Vec 648..761 '{ ...t]); }': () 658..661 'vec': Vec 664..679 '<[_]>::into_vec': fn into_vec(Box<[i32], Global>) -> Vec @@ -3070,3 +3069,17 @@ fn main() { "#, ); } + +#[test] +fn nested_break() { + check_no_mismatches( + r#" +fn func() { + let int = loop { + break 0; + break (break 0); + }; +} + "#, + ); +} diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 0f37970e2b..e67c27aa2d 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -3833,3 +3833,95 @@ fn test() { "#, ) } + +#[test] +fn dyn_multiple_auto_traits_in_different_order() { + check_no_mismatches( + r#" +auto trait Send {} +auto trait Sync {} + +fn f(t: &(dyn Sync + Send)) {} +fn g(t: &(dyn Send + Sync)) { + f(t); +} + "#, + ); + + check_no_mismatches( + r#" +auto trait Send {} +auto trait Sync {} +trait T {} + +fn f(t: &(dyn T + Send + Sync)) {} +fn g(t: &(dyn Sync + T + Send)) { + f(t); +} + "#, + ); + + check_infer_with_mismatches( + r#" +auto trait Send {} +auto trait Sync {} +trait T1 {} +trait T2 {} + +fn f(t: &(dyn T1 + T2 + Send + Sync)) {} +fn g(t: &(dyn Sync + T2 + T1 + Send)) { + f(t); +} + "#, + expect![[r#" + 68..69 't': &{unknown} + 101..103 '{}': () + 109..110 't': &{unknown} + 142..155 '{ f(t); }': () + 148..149 'f': fn f(&{unknown}) + 148..152 'f(t)': () + 150..151 't': &{unknown} + "#]], + ); + + check_no_mismatches( + r#" +auto trait Send {} +auto trait Sync {} +trait T { + type Proj: Send + Sync; +} + +fn f(t: &(dyn T + Send + Sync)) {} +fn g(t: &(dyn Sync + T + Send)) { + f(t); +} + "#, + ); +} + +#[test] +fn dyn_duplicate_auto_trait() { + check_no_mismatches( + r#" +auto trait Send {} + +fn f(t: &(dyn Send + Send)) {} +fn g(t: &(dyn Send)) { + f(t); +} + "#, + ); + + check_no_mismatches( + r#" +auto trait Send {} +trait T {} + +fn f(t: &(dyn T + Send + Send)) {} +fn g(t: &(dyn T + Send)) { + f(t); +} + "#, + ); +} 
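The `dyn Trait` tests above exercise the lowering change in crates/hir-ty/src/lower.rs: bounds are partitioned into auto traits and regular traits, lowering bails out to an error type when more than one regular (principal) trait is present, and the auto traits are sorted and deduplicated so that, for example, `dyn Sync + Send` and `dyn Send + Sync` lower to the same type. A minimal standalone sketch of that normalization idea, using `(name, is_auto)` string pairs in place of lowered chalk bounds (the function and names below are illustrative, not part of the rust-analyzer API):

    fn normalize_dyn_bounds(bounds: &[(&str, bool)]) -> Option<Vec<String>> {
        // Split into auto traits and regular ("principal") traits.
        let (mut auto, regular): (Vec<_>, Vec<_>) = bounds.iter().partition(|b| b.1);
        if regular.len() > 1 {
            // e.g. `dyn T1 + T2` is rejected and becomes an error type
            return None;
        }
        // Make the auto-trait set order-independent and duplicate-free.
        auto.sort_unstable_by_key(|b| b.0);
        auto.dedup_by_key(|b| b.0);
        Some(regular.into_iter().chain(auto).map(|b| b.0.to_string()).collect())
    }

    fn main() {
        // `dyn Sync + Send` and `dyn Send + Sync` normalize to the same bound list.
        assert_eq!(
            normalize_dyn_bounds(&[("Sync", true), ("Send", true)]),
            normalize_dyn_bounds(&[("Send", true), ("Sync", true)]),
        );
    }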
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 50374f4b3f..5edc16d8bc 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -124,6 +124,7 @@ pub struct NoSuchField { #[derive(Debug)] pub struct BreakOutsideOfLoop { pub expr: InFile>, + pub is_break: bool, } #[derive(Debug)] diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 6dccf2ed20..e4bb63a864 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -1216,11 +1216,11 @@ impl DefWithBody { let field = source_map.field_syntax(*expr); acc.push(NoSuchField { field }.into()) } - hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr } => { + &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { let expr = source_map - .expr_syntax(*expr) + .expr_syntax(expr) .expect("break outside of loop in synthetic syntax"); - acc.push(BreakOutsideOfLoop { expr }.into()) + acc.push(BreakOutsideOfLoop { expr, is_break }.into()) } hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { match source_map.expr_syntax(*call_expr) { diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index bd35af06e2..342912b678 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -140,11 +140,19 @@ impl SourceAnalyzer { ) -> Option> { let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?; let expanded = db.parse_or_expand(macro_file)?; - - let res = match ast::MacroCall::cast(expanded.clone()) { - Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?, - _ => InFile::new(macro_file, ast::Expr::cast(expanded)?), + let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) { + match stmts.expr()? { + ast::Expr::MacroExpr(mac) => { + self.expand_expr(db, InFile::new(macro_file, mac.macro_call()?))? + } + expr => InFile::new(macro_file, expr), + } + } else if let Some(call) = ast::MacroCall::cast(expanded.clone()) { + self.expand_expr(db, InFile::new(macro_file, call))? + } else { + InFile::new(macro_file, ast::Expr::cast(expanded)?) }; + Some(res) } diff --git a/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs b/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs new file mode 100644 index 0000000000..54a7f480a4 --- /dev/null +++ b/crates/ide-assists/src/handlers/convert_two_arm_bool_match_to_matches_macro.rs @@ -0,0 +1,294 @@ +use syntax::ast::{self, AstNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: convert_two_arm_bool_match_to_matches_macro +// +// Convert 2-arm match that evaluates to a boolean into the equivalent matches! invocation. 
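+// A guard on the first arm, if present, is carried over into the generated
+// `matches!` pattern.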
+// +// ``` +// fn main() { +// match scrutinee$0 { +// Some(val) if val.cond() => true, +// _ => false, +// } +// } +// ``` +// -> +// ``` +// fn main() { +// matches!(scrutinee, Some(val) if val.cond()) +// } +// ``` +pub(crate) fn convert_two_arm_bool_match_to_matches_macro( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + let match_expr = ctx.find_node_at_offset::()?; + let match_arm_list = match_expr.match_arm_list()?; + let mut arms = match_arm_list.arms(); + let first_arm = arms.next()?; + let second_arm = arms.next()?; + if arms.next().is_some() { + cov_mark::hit!(non_two_arm_match); + return None; + } + let first_arm_expr = first_arm.expr(); + let second_arm_expr = second_arm.expr(); + + let invert_matches = if is_bool_literal_expr(&first_arm_expr, true) + && is_bool_literal_expr(&second_arm_expr, false) + { + false + } else if is_bool_literal_expr(&first_arm_expr, false) + && is_bool_literal_expr(&second_arm_expr, true) + { + true + } else { + cov_mark::hit!(non_invert_bool_literal_arms); + return None; + }; + + let target_range = ctx.sema.original_range(match_expr.syntax()).range; + let expr = match_expr.expr()?; + + acc.add( + AssistId("convert_two_arm_bool_match_to_matches_macro", AssistKind::RefactorRewrite), + "Convert to matches!", + target_range, + |builder| { + let mut arm_str = String::new(); + if let Some(ref pat) = first_arm.pat() { + arm_str += &pat.to_string(); + } + if let Some(ref guard) = first_arm.guard() { + arm_str += &format!(" {}", &guard.to_string()); + } + if invert_matches { + builder.replace(target_range, format!("!matches!({}, {})", expr, arm_str)); + } else { + builder.replace(target_range, format!("matches!({}, {})", expr, arm_str)); + } + }, + ) +} + +fn is_bool_literal_expr(expr: &Option, expect_bool: bool) -> bool { + if let Some(ast::Expr::Literal(lit)) = expr { + if let ast::LiteralKind::Bool(b) = lit.kind() { + return b == expect_bool; + } + } + + return false; +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::convert_two_arm_bool_match_to_matches_macro; + + #[test] + fn not_applicable_outside_of_range_left() { + check_assist_not_applicable( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + $0 match a { + Some(_val) => true, + _ => false + } +} + "#, + ); + } + + #[test] + fn not_applicable_non_two_arm_match() { + cov_mark::check!(non_two_arm_match); + check_assist_not_applicable( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(3) => true, + Some(4) => true, + _ => false + } +} + "#, + ); + } + + #[test] + fn not_applicable_non_bool_literal_arms() { + cov_mark::check!(non_invert_bool_literal_arms); + check_assist_not_applicable( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(val) => val == 3, + _ => false + } +} + "#, + ); + } + #[test] + fn not_applicable_both_false_arms() { + cov_mark::check!(non_invert_bool_literal_arms); + check_assist_not_applicable( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(val) => false, + _ => false + } +} + "#, + ); + } + + #[test] + fn not_applicable_both_true_arms() { + cov_mark::check!(non_invert_bool_literal_arms); + check_assist_not_applicable( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(val) => true, + _ => true + } +} + "#, + ); + } + + #[test] + fn 
convert_simple_case() { + check_assist( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(_val) => true, + _ => false + } +} +"#, + r#" +fn foo(a: Option) -> bool { + matches!(a, Some(_val)) +} +"#, + ); + } + + #[test] + fn convert_simple_invert_case() { + check_assist( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(_val) => false, + _ => true + } +} +"#, + r#" +fn foo(a: Option) -> bool { + !matches!(a, Some(_val)) +} +"#, + ); + } + + #[test] + fn convert_with_guard_case() { + check_assist( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(val) if val > 3 => true, + _ => false + } +} +"#, + r#" +fn foo(a: Option) -> bool { + matches!(a, Some(val) if val > 3) +} +"#, + ); + } + + #[test] + fn convert_enum_match_cases() { + check_assist( + convert_two_arm_bool_match_to_matches_macro, + r#" +enum X { A, B } + +fn foo(a: X) -> bool { + match a$0 { + X::A => true, + _ => false + } +} +"#, + r#" +enum X { A, B } + +fn foo(a: X) -> bool { + matches!(a, X::A) +} +"#, + ); + } + + #[test] + fn convert_target_simple() { + check_assist_target( + convert_two_arm_bool_match_to_matches_macro, + r#" +fn foo(a: Option) -> bool { + match a$0 { + Some(val) => true, + _ => false + } +} +"#, + r#"match a { + Some(val) => true, + _ => false + }"#, + ); + } + + #[test] + fn convert_target_complex() { + check_assist_target( + convert_two_arm_bool_match_to_matches_macro, + r#" +enum E { X, Y } + +fn main() { + match E::X$0 { + E::X => true, + _ => false, + } +} +"#, + "match E::X { + E::X => true, + _ => false, + }", + ); + } +} diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index dfb5652126..ddc2052e7a 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -101,21 +101,22 @@ pub(crate) fn extract_struct_from_enum_variant( }); } - let indent = enum_ast.indent_level(); let generic_params = enum_ast .generic_param_list() .and_then(|known_generics| extract_generic_params(&known_generics, &field_list)); let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); let def = create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); + + let enum_ast = variant.parent_enum(); + let indent = enum_ast.indent_level(); def.reindent_to(indent); - let start_offset = &variant.parent_enum().syntax().clone(); - ted::insert_all_raw( - ted::Position::before(start_offset), + ted::insert_all( + ted::Position::before(enum_ast.syntax()), vec![ def.syntax().clone().into(), - make::tokens::whitespace(&format!("\n\n{}", indent)).into(), + make::tokens::whitespace(&format!("\n\n{indent}")).into(), ], ); @@ -227,7 +228,7 @@ fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, b } fn create_struct_def( - variant_name: ast::Name, + name: ast::Name, variant: &ast::Variant, field_list: &Either, generics: Option, @@ -269,43 +270,27 @@ fn create_struct_def( field_list.into() } }; - field_list.reindent_to(IndentLevel::single()); - let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update(); + let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update(); - // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some 
deliberation - let attrs_and_docs = |node: &SyntaxNode| { - let mut select_next_ws = false; - node.children_with_tokens().filter(move |child| { - let accept = match child.kind() { - ATTR | COMMENT => { - select_next_ws = true; - return true; - } - WHITESPACE if select_next_ws => true, - _ => false, - }; - select_next_ws = false; - - accept - }) - }; - - // copy attributes & comments from variant - let variant_attrs = attrs_and_docs(variant.syntax()) - .map(|tok| match tok.kind() { - WHITESPACE => make::tokens::single_newline().into(), - _ => tok, - }) - .collect(); - ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs); + // take comments from variant + ted::insert_all( + ted::Position::first_child_of(strukt.syntax()), + take_all_comments(variant.syntax()), + ); // copy attributes from enum ted::insert_all( ted::Position::first_child_of(strukt.syntax()), - enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(), + enum_ + .attrs() + .flat_map(|it| { + vec![it.syntax().clone_for_update().into(), make::tokens::single_newline().into()] + }) + .collect(), ); + strukt } @@ -346,16 +331,48 @@ fn update_variant(variant: &ast::Variant, generics: Option Vec { + let mut remove_next_ws = false; + node.children_with_tokens() + .filter_map(move |child| match child.kind() { + COMMENT => { + remove_next_ws = true; + child.detach(); + Some(child) + } + WHITESPACE if remove_next_ws => { + remove_next_ws = false; + child.detach(); + Some(make::tokens::single_newline().into()) + } + _ => { + remove_next_ws = false; + None + } + }) + .collect() +} + fn apply_references( insert_use_cfg: InsertUseConfig, segment: ast::PathSegment, @@ -480,10 +497,14 @@ enum En { Var(Var) }"#, fn test_extract_struct_carries_over_attributes() { check_assist( extract_struct_from_enum_variant, - r#"#[derive(Debug)] + r#" +#[derive(Debug)] #[derive(Clone)] enum Enum { Variant{ field: u32$0 } }"#, - r#"#[derive(Debug)]#[derive(Clone)] struct Variant{ field: u32 } + r#" +#[derive(Debug)] +#[derive(Clone)] +struct Variant{ field: u32 } #[derive(Debug)] #[derive(Clone)] @@ -614,7 +635,7 @@ enum A { One(One) }"#, } #[test] - fn test_extract_struct_keep_comments_and_attrs_on_variant_struct() { + fn test_extract_struct_move_struct_variant_comments() { check_assist( extract_struct_from_enum_variant, r#" @@ -631,19 +652,19 @@ enum A { /* comment */ // other /// comment -#[attr] struct One{ a: u32 } enum A { + #[attr] One(One) }"#, ); } #[test] - fn test_extract_struct_keep_comments_and_attrs_on_variant_tuple() { + fn test_extract_struct_move_tuple_variant_comments() { check_assist( extract_struct_from_enum_variant, r#" @@ -658,10 +679,10 @@ enum A { /* comment */ // other /// comment -#[attr] struct One(u32, u32); enum A { + #[attr] One(One) }"#, ); diff --git a/crates/ide-assists/src/handlers/replace_or_with_or_else.rs b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs new file mode 100644 index 0000000000..7d91be6210 --- /dev/null +++ b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs @@ -0,0 +1,364 @@ +use ide_db::{ + assists::{AssistId, AssistKind}, + famous_defs::FamousDefs, +}; +use syntax::{ + ast::{self, make, Expr, HasArgList}, + AstNode, +}; + +use crate::{AssistContext, Assists}; + +// Assist: replace_or_with_or_else +// +// Replace `unwrap_or` with `unwrap_or_else` and `ok_or` with `ok_or_else`. 
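+// Also covers `or` -> `or_else` and `map_or` -> `map_or_else`.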
+// +// ``` +// # //- minicore:option +// fn foo() { +// let a = Some(1); +// a.unwra$0p_or(2); +// } +// ``` +// -> +// ``` +// fn foo() { +// let a = Some(1); +// a.unwrap_or_else(|| 2); +// } +// ``` +pub(crate) fn replace_or_with_or_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let call: ast::MethodCallExpr = ctx.find_node_at_offset()?; + + let kind = is_option_or_result(call.receiver()?, ctx)?; + + let (name, arg_list) = (call.name_ref()?, call.arg_list()?); + + let mut map_or = false; + + let replace = match &*name.text() { + "unwrap_or" => "unwrap_or_else".to_string(), + "or" => "or_else".to_string(), + "ok_or" if kind == Kind::Option => "ok_or_else".to_string(), + "map_or" => { + map_or = true; + "map_or_else".to_string() + } + _ => return None, + }; + + let arg = match arg_list.args().collect::>().as_slice() { + [] => make::arg_list(Vec::new()), + [first] => { + let param = into_closure(first); + make::arg_list(vec![param]) + } + [first, second] if map_or => { + let param = into_closure(first); + make::arg_list(vec![param, second.clone()]) + } + _ => return None, + }; + + acc.add( + AssistId("replace_or_with_or_else", AssistKind::RefactorRewrite), + format!("Replace {} with {}", name.text(), replace), + call.syntax().text_range(), + |builder| { + builder.replace(name.syntax().text_range(), replace); + builder.replace_ast(arg_list, arg) + }, + ) +} + +fn into_closure(param: &Expr) -> Expr { + (|| { + if let ast::Expr::CallExpr(call) = param { + if call.arg_list()?.args().count() == 0 { + Some(call.expr()?.clone()) + } else { + None + } + } else { + None + } + })() + .unwrap_or_else(|| make::expr_closure(None, param.clone())) +} + +// Assist: replace_or_else_with_or +// +// Replace `unwrap_or_else` with `unwrap_or` and `ok_or_else` with `ok_or`. 
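+// Also covers `or_else` -> `or` and `map_or_else` -> `map_or`.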
+// +// ``` +// # //- minicore:option +// fn foo() { +// let a = Some(1); +// a.unwra$0p_or_else(|| 2); +// } +// ``` +// -> +// ``` +// fn foo() { +// let a = Some(1); +// a.unwrap_or(2); +// } +// ``` +pub(crate) fn replace_or_else_with_or(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let call: ast::MethodCallExpr = ctx.find_node_at_offset()?; + + let kind = is_option_or_result(call.receiver()?, ctx)?; + + let (name, arg_list) = (call.name_ref()?, call.arg_list()?); + + let mut map_or = false; + let replace = match &*name.text() { + "unwrap_or_else" => "unwrap_or".to_string(), + "or_else" => "or".to_string(), + "ok_or_else" if kind == Kind::Option => "ok_or".to_string(), + "map_or_else" => { + map_or = true; + "map_or".to_string() + } + _ => return None, + }; + + let arg = match arg_list.args().collect::>().as_slice() { + [] => make::arg_list(Vec::new()), + [first] => { + let param = into_call(first); + make::arg_list(vec![param]) + } + [first, second] if map_or => { + let param = into_call(first); + make::arg_list(vec![param, second.clone()]) + } + _ => return None, + }; + + acc.add( + AssistId("replace_or_else_with_or", AssistKind::RefactorRewrite), + format!("Replace {} with {}", name.text(), replace), + call.syntax().text_range(), + |builder| { + builder.replace(name.syntax().text_range(), replace); + builder.replace_ast(arg_list, arg) + }, + ) +} + +fn into_call(param: &Expr) -> Expr { + (|| { + if let ast::Expr::ClosureExpr(closure) = param { + if closure.param_list()?.params().count() == 0 { + Some(closure.body()?.clone()) + } else { + None + } + } else { + None + } + })() + .unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new()))) +} + +#[derive(PartialEq, Eq)] +enum Kind { + Option, + Result, +} + +fn is_option_or_result(receiver: Expr, ctx: &AssistContext<'_>) -> Option { + let ty = ctx.sema.type_of_expr(&receiver)?.adjusted().as_adt()?.as_enum()?; + let option_enum = + FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_option_Option(); + + if let Some(option_enum) = option_enum { + if ty == option_enum { + return Some(Kind::Option); + } + } + + let result_enum = + FamousDefs(&ctx.sema, ctx.sema.scope(receiver.syntax())?.krate()).core_result_Result(); + + if let Some(result_enum) = result_enum { + if ty == result_enum { + return Some(Kind::Result); + } + } + + None +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn replace_or_with_or_else_simple() { + check_assist( + replace_or_with_or_else, + r#" +//- minicore: option +fn foo() { + let foo = Some(1); + return foo.unwrap_$0or(2); +} +"#, + r#" +fn foo() { + let foo = Some(1); + return foo.unwrap_or_else(|| 2); +} +"#, + ) + } + + #[test] + fn replace_or_with_or_else_call() { + check_assist( + replace_or_with_or_else, + r#" +//- minicore: option +fn foo() { + let foo = Some(1); + return foo.unwrap_$0or(x()); +} +"#, + r#" +fn foo() { + let foo = Some(1); + return foo.unwrap_or_else(x); +} +"#, + ) + } + + #[test] + fn replace_or_with_or_else_block() { + check_assist( + replace_or_with_or_else, + r#" +//- minicore: option +fn foo() { + let foo = Some(1); + return foo.unwrap_$0or({ + let mut x = bar(); + for i in 0..10 { + x += i; + } + x + }); +} +"#, + r#" +fn foo() { + let foo = Some(1); + return foo.unwrap_or_else(|| { + let mut x = bar(); + for i in 0..10 { + x += i; + } + x + }); +} +"#, + ) + } + + #[test] + fn replace_or_else_with_or_simple() { + check_assist( + replace_or_else_with_or, 
+ r#" +//- minicore: option +fn foo() { + let foo = Some(1); + return foo.unwrap_$0or_else(|| 2); +} +"#, + r#" +fn foo() { + let foo = Some(1); + return foo.unwrap_or(2); +} +"#, + ) + } + + #[test] + fn replace_or_else_with_or_call() { + check_assist( + replace_or_else_with_or, + r#" +//- minicore: option +fn foo() { + let foo = Some(1); + return foo.unwrap_$0or_else(x); +} +"#, + r#" +fn foo() { + let foo = Some(1); + return foo.unwrap_or(x()); +} +"#, + ) + } + + #[test] + fn replace_or_else_with_or_result() { + check_assist( + replace_or_else_with_or, + r#" +//- minicore: result +fn foo() { + let foo = Ok(1); + return foo.unwrap_$0or_else(x); +} +"#, + r#" +fn foo() { + let foo = Ok(1); + return foo.unwrap_or(x()); +} +"#, + ) + } + + #[test] + fn replace_or_else_with_or_map() { + check_assist( + replace_or_else_with_or, + r#" +//- minicore: result +fn foo() { + let foo = Ok("foo"); + return foo.map$0_or_else(|| 42, |v| v.len()); +} +"#, + r#" +fn foo() { + let foo = Ok("foo"); + return foo.map_or(42, |v| v.len()); +} +"#, + ) + } + + #[test] + fn replace_or_else_with_or_not_applicable() { + check_assist_not_applicable( + replace_or_else_with_or, + r#" +fn foo() { + let foo = Ok(1); + return foo.unwrap_$0or_else(x); +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index 5242f3b510..521447c26d 100644 --- a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -1,5 +1,6 @@ +use hir::HirDisplay; use syntax::{ - ast::{Expr, GenericArg}, + ast::{Expr, GenericArg, GenericArgList}, ast::{LetStmt, Type::InferType}, AstNode, TextRange, }; @@ -34,21 +35,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( let initializer = let_stmt.initializer()?; - let generic_args = match &initializer { - Expr::MethodCallExpr(ce) => ce.generic_arg_list()?, - Expr::CallExpr(ce) => { - if let Expr::PathExpr(pe) = ce.expr()? { - pe.path()?.segment()?.generic_arg_list()? - } else { - cov_mark::hit!(not_applicable_if_non_path_function_call); - return None; - } - } - _ => { - cov_mark::hit!(not_applicable_if_non_function_call_initializer); - return None; - } - }; + let generic_args = generic_arg_list(&initializer)?; // Find range of ::<_> let colon2 = generic_args.coloncolon_token()?; @@ -65,7 +52,16 @@ pub(crate) fn replace_turbofish_with_explicit_type( // An improvement would be to check that this is correctly part of the return value of the // function call, or sub in the actual return type. - let turbofish_type = &turbofish_args[0]; + let returned_type = match ctx.sema.type_of_expr(&initializer) { + Some(returned_type) if !returned_type.original.contains_unknown() => { + let module = ctx.sema.scope(let_stmt.syntax())?.module(); + returned_type.original.display_source_code(ctx.db(), module.into()).ok()? 
+ } + _ => { + cov_mark::hit!(fallback_to_turbofish_type_if_type_info_not_available); + turbofish_args[0].to_string() + } + }; let initializer_start = initializer.syntax().text_range().start(); if ctx.offset() > turbofish_range.end() || ctx.offset() < initializer_start { @@ -83,7 +79,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( "Replace turbofish with explicit type", TextRange::new(initializer_start, turbofish_range.end()), |builder| { - builder.insert(ident_range.end(), format!(": {}", turbofish_type)); + builder.insert(ident_range.end(), format!(": {}", returned_type)); builder.delete(turbofish_range); }, ); @@ -98,7 +94,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( "Replace `_` with turbofish type", turbofish_range, |builder| { - builder.replace(underscore_range, turbofish_type.to_string()); + builder.replace(underscore_range, returned_type); builder.delete(turbofish_range); }, ); @@ -107,6 +103,26 @@ pub(crate) fn replace_turbofish_with_explicit_type( None } +fn generic_arg_list(expr: &Expr) -> Option { + match expr { + Expr::MethodCallExpr(expr) => expr.generic_arg_list(), + Expr::CallExpr(expr) => { + if let Expr::PathExpr(pe) = expr.expr()? { + pe.path()?.segment()?.generic_arg_list() + } else { + cov_mark::hit!(not_applicable_if_non_path_function_call); + return None; + } + } + Expr::AwaitExpr(expr) => generic_arg_list(&expr.expr()?), + Expr::TryExpr(expr) => generic_arg_list(&expr.expr()?), + _ => { + cov_mark::hit!(not_applicable_if_non_function_call_initializer); + None + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -115,6 +131,7 @@ mod tests { #[test] fn replaces_turbofish_for_vec_string() { + cov_mark::check!(fallback_to_turbofish_type_if_type_info_not_available); check_assist( replace_turbofish_with_explicit_type, r#" @@ -135,6 +152,7 @@ fn main() { #[test] fn replaces_method_calls() { // foo.make() is a method call which uses a different expr in the let initializer + cov_mark::check!(fallback_to_turbofish_type_if_type_info_not_available); check_assist( replace_turbofish_with_explicit_type, r#" @@ -237,6 +255,110 @@ fn make() -> T {} fn main() { let a = make$0::, i32>(); } +"#, + ); + } + + #[test] + fn replaces_turbofish_for_known_type() { + check_assist( + replace_turbofish_with_explicit_type, + r#" +fn make() -> T {} +fn main() { + let a = make$0::(); +} +"#, + r#" +fn make() -> T {} +fn main() { + let a: i32 = make(); +} +"#, + ); + check_assist( + replace_turbofish_with_explicit_type, + r#" +//- minicore: option +fn make() -> T {} +fn main() { + let a = make$0::>(); +} +"#, + r#" +fn make() -> T {} +fn main() { + let a: Option = make(); +} +"#, + ); + } + + #[test] + fn replaces_turbofish_not_same_type() { + check_assist( + replace_turbofish_with_explicit_type, + r#" +//- minicore: option +fn make() -> Option {} +fn main() { + let a = make$0::(); +} +"#, + r#" +fn make() -> Option {} +fn main() { + let a: Option = make(); +} +"#, + ); + } + + #[test] + fn replaces_turbofish_for_type_with_defaulted_generic_param() { + check_assist( + replace_turbofish_with_explicit_type, + r#" +struct HasDefault(T, U); +fn make() -> HasDefault {} +fn main() { + let a = make$0::(); +} +"#, + r#" +struct HasDefault(T, U); +fn make() -> HasDefault {} +fn main() { + let a: HasDefault = make(); +} +"#, + ); + } + + #[test] + fn replaces_turbofish_try_await() { + check_assist( + replace_turbofish_with_explicit_type, + r#" +//- minicore: option, future +struct Fut(T); +impl core::future::Future for Fut { + type Output = Option; +} +fn make() -> Fut {} +fn main() { 
+ let a = make$0::().await?; +} +"#, + r#" +struct Fut(T); +impl core::future::Future for Fut { + type Output = Option; +} +fn make() -> Fut {} +fn main() { + let a: bool = make().await?; +} "#, ); } diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs new file mode 100644 index 0000000000..9565f0ee6f --- /dev/null +++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs @@ -0,0 +1,293 @@ +use syntax::{ + algo::neighbor, + ast::{self, edit::IndentLevel, make, AstNode}, + ted::{self, Position}, + Direction, SyntaxKind, T, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: unmerge_match_arm +// +// Splits the current match with a `|` pattern into two arms with identical bodies. +// +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move(..) $0| Action::Stop => foo(), +// } +// } +// ``` +// -> +// ``` +// enum Action { Move { distance: u32 }, Stop } +// +// fn handle(action: Action) { +// match action { +// Action::Move(..) => foo(), +// Action::Stop => foo(), +// } +// } +// ``` +pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let pipe_token = ctx.find_token_syntax_at_offset(T![|])?; + let or_pat = ast::OrPat::cast(pipe_token.parent()?)?.clone_for_update(); + let match_arm = ast::MatchArm::cast(or_pat.syntax().parent()?)?; + let match_arm_body = match_arm.expr()?; + + // We don't need to check for leading pipe because it is directly under `MatchArm` + // without `OrPat`. + + let new_parent = match_arm.syntax().parent()?; + let old_parent_range = new_parent.text_range(); + + acc.add( + AssistId("unmerge_match_arm", AssistKind::RefactorRewrite), + "Unmerge match arm", + pipe_token.text_range(), + |edit| { + let pats_after = pipe_token + .siblings_with_tokens(Direction::Next) + .filter_map(|it| ast::Pat::cast(it.into_node()?)); + // FIXME: We should add a leading pipe if the original arm has one. + let new_match_arm = make::match_arm( + pats_after, + match_arm.guard().and_then(|guard| guard.condition()), + match_arm_body, + ) + .clone_for_update(); + + let mut pipe_index = pipe_token.index(); + if pipe_token + .prev_sibling_or_token() + .map_or(false, |it| it.kind() == SyntaxKind::WHITESPACE) + { + pipe_index -= 1; + } + or_pat.syntax().splice_children( + pipe_index..or_pat.syntax().children_with_tokens().count(), + Vec::new(), + ); + + let mut insert_after_old_arm = Vec::new(); + + // A comma can be: + // - After the arm. In this case we always want to insert a comma after the newly + // inserted arm. + // - Missing after the arm, with no arms after. In this case we want to insert a + // comma before the newly inserted arm. It can not be necessary if there arm + // body is a block, but we don't bother to check that. + // - Missing after the arm with arms after, if the arm body is a block. In this case + // we don't want to insert a comma at all. 
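+            // The check below walks the arm's trailing tokens in reverse, skipping
+            // trivia, to see whether the last significant token is already a comma.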
+ let has_comma_after = + std::iter::successors(match_arm.syntax().last_child_or_token(), |it| { + it.prev_sibling_or_token() + }) + .map(|it| it.kind()) + .skip_while(|it| it.is_trivia()) + .next() + == Some(T![,]); + let has_arms_after = neighbor(&match_arm, Direction::Next).is_some(); + if !has_comma_after && !has_arms_after { + insert_after_old_arm.push(make::token(T![,]).into()); + } + + let indent = IndentLevel::from_node(match_arm.syntax()); + insert_after_old_arm.push(make::tokens::whitespace(&format!("\n{indent}")).into()); + + insert_after_old_arm.push(new_match_arm.syntax().clone().into()); + + ted::insert_all_raw(Position::after(match_arm.syntax()), insert_after_old_arm); + + if has_comma_after { + ted::insert_raw( + Position::last_child_of(new_match_arm.syntax()), + make::token(T![,]), + ); + } + + edit.replace(old_parent_range, new_parent.to_string()); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn unmerge_match_arm_single_pipe() { + check_assist( + unmerge_match_arm, + r#" +#[derive(Debug)] +enum X { A, B, C } + +fn main() { + let x = X::A; + let y = match x { + X::A $0| X::B => { 1i32 } + X::C => { 2i32 } + }; +} +"#, + r#" +#[derive(Debug)] +enum X { A, B, C } + +fn main() { + let x = X::A; + let y = match x { + X::A => { 1i32 } + X::B => { 1i32 } + X::C => { 2i32 } + }; +} +"#, + ); + } + + #[test] + fn unmerge_match_arm_guard() { + check_assist( + unmerge_match_arm, + r#" +#[derive(Debug)] +enum X { A, B, C } + +fn main() { + let x = X::A; + let y = match x { + X::A $0| X::B if true => { 1i32 } + _ => { 2i32 } + }; +} +"#, + r#" +#[derive(Debug)] +enum X { A, B, C } + +fn main() { + let x = X::A; + let y = match x { + X::A if true => { 1i32 } + X::B if true => { 1i32 } + _ => { 2i32 } + }; +} +"#, + ); + } + + #[test] + fn unmerge_match_arm_leading_pipe() { + check_assist_not_applicable( + unmerge_match_arm, + r#" + +fn main() { + let y = match 0 { + |$0 0 => { 1i32 } + 1 => { 2i32 } + }; +} +"#, + ); + } + + #[test] + fn unmerge_match_arm_multiple_pipes() { + check_assist( + unmerge_match_arm, + r#" +#[derive(Debug)] +enum X { A, B, C, D, E } + +fn main() { + let x = X::A; + let y = match x { + X::A | X::B |$0 X::C | X::D => 1i32, + X::E => 2i32, + }; +} +"#, + r#" +#[derive(Debug)] +enum X { A, B, C, D, E } + +fn main() { + let x = X::A; + let y = match x { + X::A | X::B => 1i32, + X::C | X::D => 1i32, + X::E => 2i32, + }; +} +"#, + ); + } + + #[test] + fn unmerge_match_arm_inserts_comma_if_required() { + check_assist( + unmerge_match_arm, + r#" +#[derive(Debug)] +enum X { A, B } + +fn main() { + let x = X::A; + let y = match x { + X::A $0| X::B => 1i32 + }; +} +"#, + r#" +#[derive(Debug)] +enum X { A, B } + +fn main() { + let x = X::A; + let y = match x { + X::A => 1i32, + X::B => 1i32 + }; +} +"#, + ); + } + + #[test] + fn unmerge_match_arm_inserts_comma_if_had_after() { + check_assist( + unmerge_match_arm, + r#" +#[derive(Debug)] +enum X { A, B } + +fn main() { + let x = X::A; + match x { + X::A $0| X::B => {}, + } +} +"#, + r#" +#[derive(Debug)] +enum X { A, B } + +fn main() { + let x = X::A; + match x { + X::A => {}, + X::B => {}, + } +} +"#, + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 7fb35143fa..e52544db5f 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -122,6 +122,7 @@ mod handlers { mod convert_let_else_to_match; mod convert_tuple_struct_to_named_struct; mod convert_to_guarded_return; + 
mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; mod destructure_tuple_binding; mod expand_glob_import; @@ -179,12 +180,14 @@ mod handlers { mod replace_try_expr_with_match; mod replace_derive_with_manual_impl; mod replace_if_let_with_match; + mod replace_or_with_or_else; mod introduce_named_generic; mod replace_let_with_if_let; mod replace_qualified_name_with_use; mod replace_string_with_char; mod replace_turbofish_with_explicit_type; mod split_import; + mod unmerge_match_arm; mod sort_items; mod toggle_ignore; mod unmerge_use; @@ -215,6 +218,7 @@ mod handlers { convert_let_else_to_match::convert_let_else_to_match, convert_to_guarded_return::convert_to_guarded_return, convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct, + convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro, convert_while_to_loop::convert_while_to_loop, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, @@ -273,11 +277,14 @@ mod handlers { replace_if_let_with_match::replace_if_let_with_match, replace_if_let_with_match::replace_match_with_if_let, replace_let_with_if_let::replace_let_with_if_let, + replace_or_with_or_else::replace_or_else_with_or, + replace_or_with_or_else::replace_or_with_or_else, replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type, replace_qualified_name_with_use::replace_qualified_name_with_use, sort_items::sort_items, split_import::split_import, toggle_ignore::toggle_ignore, + unmerge_match_arm::unmerge_match_arm, unmerge_use::unmerge_use, unnecessary_async::unnecessary_async, unwrap_block::unwrap_block, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 22319f3613..a8c8622c1c 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -472,6 +472,26 @@ impl Point { ) } +#[test] +fn doctest_convert_two_arm_bool_match_to_matches_macro() { + check_doc_test( + "convert_two_arm_bool_match_to_matches_macro", + r#####" +fn main() { + match scrutinee$0 { + Some(val) if val.cond() => true, + _ => false, + } +} +"#####, + r#####" +fn main() { + matches!(scrutinee, Some(val) if val.cond()) +} +"#####, + ) +} + #[test] fn doctest_convert_while_to_loop() { check_doc_test( @@ -2009,6 +2029,46 @@ fn handle(action: Action) { ) } +#[test] +fn doctest_replace_or_else_with_or() { + check_doc_test( + "replace_or_else_with_or", + r#####" +//- minicore:option +fn foo() { + let a = Some(1); + a.unwra$0p_or_else(|| 2); +} +"#####, + r#####" +fn foo() { + let a = Some(1); + a.unwrap_or(2); +} +"#####, + ) +} + +#[test] +fn doctest_replace_or_with_or_else() { + check_doc_test( + "replace_or_with_or_else", + r#####" +//- minicore:option +fn foo() { + let a = Some(1); + a.unwra$0p_or(2); +} +"#####, + r#####" +fn foo() { + let a = Some(1); + a.unwrap_or_else(|| 2); +} +"#####, + ) +} + #[test] fn doctest_replace_qualified_name_with_use() { check_doc_test( @@ -2207,6 +2267,32 @@ fn arithmetics { ) } +#[test] +fn doctest_unmerge_match_arm() { + check_doc_test( + "unmerge_match_arm", + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move(..) $0| Action::Stop => foo(), + } +} +"#####, + r#####" +enum Action { Move { distance: u32 }, Stop } + +fn handle(action: Action) { + match action { + Action::Move(..) 
=> foo(), + Action::Stop => foo(), + } +} +"#####, + ) +} + #[test] fn doctest_unmerge_use() { check_doc_test( diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 4d66af9e8d..588b52cc1e 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -282,14 +282,26 @@ pub(crate) fn complete_expr_path( } } - if let Some(ty) = innermost_ret_ty { + if let Some(ret_ty) = innermost_ret_ty { add_keyword( "return", - match (in_block_expr, ty.is_unit()) { - (true, true) => "return ;", - (true, false) => "return;", - (false, true) => "return $0", - (false, false) => "return", + match (ret_ty.is_unit(), in_block_expr) { + (true, true) => { + cov_mark::hit!(return_unit_block); + "return;" + } + (true, false) => { + cov_mark::hit!(return_unit_no_block); + "return" + } + (false, true) => { + cov_mark::hit!(return_value_block); + "return $0;" + } + (false, false) => { + cov_mark::hit!(return_value_no_block); + "return $0" + } }, ); } diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 925081ebf6..38e24ebc73 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -1,7 +1,7 @@ //! Completion tests for expressions. use expect_test::{expect, Expect}; -use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; +use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); @@ -670,3 +670,39 @@ fn main() { "#]], ); } + +#[test] +fn return_unit_block() { + cov_mark::check!(return_unit_block); + check_edit("return", r#"fn f() { if true { $0 } }"#, r#"fn f() { if true { return; } }"#); +} + +#[test] +fn return_unit_no_block() { + cov_mark::check!(return_unit_no_block); + check_edit( + "return", + r#"fn f() { match () { () => $0 } }"#, + r#"fn f() { match () { () => return } }"#, + ); +} + +#[test] +fn return_value_block() { + cov_mark::check!(return_value_block); + check_edit( + "return", + r#"fn f() -> i32 { if true { $0 } }"#, + r#"fn f() -> i32 { if true { return $0; } }"#, + ); +} + +#[test] +fn return_value_no_block() { + cov_mark::check!(return_value_no_block); + check_edit( + "return", + r#"fn f() -> i32 { match () { () => $0 } }"#, + r#"fn f() -> i32 { match () { () => return $0 } }"#, + ); +} diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 84bde4d44d..b890e2b58d 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -315,7 +315,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::IndexExpr(_) | ast::Expr::Literal(_) | ast::Expr::MacroExpr(_) - | ast::Expr::MacroStmts(_) | ast::Expr::MethodCallExpr(_) | ast::Expr::ParenExpr(_) | ast::Expr::PathExpr(_) diff --git a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs index d12594a4ce..0c92e706b3 100644 --- a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs +++ b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs @@ -7,9 +7,10 @@ pub(crate) fn break_outside_of_loop( ctx: &DiagnosticsContext<'_>, d: &hir::BreakOutsideOfLoop, ) -> Diagnostic { + let construct = if d.is_break { "break" } else { "continue" }; Diagnostic::new( "break-outside-of-loop", - 
"break outside of loop", + format!("{construct} outside of loop"), ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, ) } @@ -19,11 +20,122 @@ mod tests { use crate::tests::check_diagnostics; #[test] - fn break_outside_of_loop() { + fn outside_of_loop() { check_diagnostics( r#" -fn foo() { break; } - //^^^^^ error: break outside of loop +fn foo() { + break; + //^^^^^ error: break outside of loop + break 'a; + //^^^^^^^^ error: break outside of loop + continue; + //^^^^^^^^ error: continue outside of loop + continue 'a; + //^^^^^^^^^^^ error: continue outside of loop +} +"#, + ); + } + + #[test] + fn try_blocks_are_borders() { + check_diagnostics( + r#" +fn foo() { + 'a: loop { + try { + break; + //^^^^^ error: break outside of loop + break 'a; + //^^^^^^^^ error: break outside of loop + continue; + //^^^^^^^^ error: continue outside of loop + continue 'a; + //^^^^^^^^^^^ error: continue outside of loop + }; + } +} +"#, + ); + } + + #[test] + fn async_blocks_are_borders() { + check_diagnostics( + r#" +fn foo() { + 'a: loop { + try { + break; + //^^^^^ error: break outside of loop + break 'a; + //^^^^^^^^ error: break outside of loop + continue; + //^^^^^^^^ error: continue outside of loop + continue 'a; + //^^^^^^^^^^^ error: continue outside of loop + }; + } +} +"#, + ); + } + + #[test] + fn closures_are_borders() { + check_diagnostics( + r#" +fn foo() { + 'a: loop { + try { + break; + //^^^^^ error: break outside of loop + break 'a; + //^^^^^^^^ error: break outside of loop + continue; + //^^^^^^^^ error: continue outside of loop + continue 'a; + //^^^^^^^^^^^ error: continue outside of loop + }; + } +} +"#, + ); + } + + #[test] + fn blocks_pass_through() { + check_diagnostics( + r#" +fn foo() { + 'a: loop { + { + break; + break 'a; + continue; + continue 'a; + } + } +} +"#, + ); + } + + #[test] + fn label_blocks() { + check_diagnostics( + r#" +fn foo() { + 'a: { + break; + //^^^^^ error: break outside of loop + break 'a; + continue; + //^^^^^^^^ error: continue outside of loop + continue 'a; + //^^^^^^^^^^^ error: continue outside of loop + } +} "#, ); } diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 5fcaf405b1..c24430ce60 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -947,6 +947,50 @@ fn f() { ); } + mod rust_unstable { + use super::*; + + #[test] + fn rfc_1872_exhaustive_patterns() { + check_diagnostics_no_bails( + r" +//- minicore: option, result +#![feature(exhaustive_patterns)] +enum Void {} +fn test() { + match None:: { None => () } + match Result::::Ok(2) { Ok(_) => () } + match Result::::Ok(2) { Ok(_) => () } + match (2, loop {}) {} + match Result::::Ok(loop {}) {} + match (&loop {}) {} // https://github.com/rust-lang/rust/issues/50642#issuecomment-388234919 + // ^^^^^^^^^^ error: missing match arm: type `&!` is non-empty +}", + ); + } + + #[test] + fn rfc_1872_private_uninhabitedness() { + check_diagnostics_no_bails( + r" +//- minicore: option +//- /lib.rs crate:lib +#![feature(exhaustive_patterns)] +pub struct PrivatelyUninhabited { private_field: Void } +enum Void {} +fn test_local(x: Option) { + match x {} +} // ^ error: missing match arm: `None` not covered +//- /main.rs crate:main deps:lib +#![feature(exhaustive_patterns)] +fn test(x: Option) { + match x {} + // ^ error: missing match arm: `None` and `Some(_)` not covered +}", + ); + } + } + mod false_negatives { //! 
The implementation of match checking here is a work in progress. As we roll this out, we //! prefer false negatives to false positives (ideally there would be no false positives). This diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index ed19784d1f..e9034daefa 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -1910,7 +1910,7 @@ impl Vec { pub struct Box {} trait Display {} -trait Sync {} +auto trait Sync {} fn main() { // The block expression wrapping disables the constructor hint hiding logic diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs index 8de5d33a19..5dc9c6c82a 100644 --- a/crates/parser/src/grammar/paths.rs +++ b/crates/parser/src/grammar/paths.rs @@ -118,6 +118,11 @@ fn opt_path_type_args(p: &mut Parser<'_>, mode: Mode) { match mode { Mode::Use => {} Mode::Type => { + // test typepathfn_with_coloncolon + // type F = Start::(Middle) -> (Middle)::End; + if p.at(T![::]) && p.nth_at(2, T!['(']) { + p.bump(T![::]); + } // test path_fn_trait_args // type F = Box ()>; if p.at(T!['(']) { diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 628fa745e7..c84f45f1f8 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -262,33 +262,117 @@ pub enum SyntaxKind { use self::SyntaxKind::*; impl SyntaxKind { pub fn is_keyword(self) -> bool { - match self { - AS_KW | ASYNC_KW | AWAIT_KW | BOX_KW | BREAK_KW | CONST_KW | CONTINUE_KW | CRATE_KW - | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW | FALSE_KW | FN_KW | FOR_KW | IF_KW - | IMPL_KW | IN_KW | LET_KW | LOOP_KW | MACRO_KW | MATCH_KW | MOD_KW | MOVE_KW - | MUT_KW | PUB_KW | REF_KW | RETURN_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW - | STRUCT_KW | SUPER_KW | TRAIT_KW | TRUE_KW | TRY_KW | TYPE_KW | UNSAFE_KW | USE_KW - | WHERE_KW | WHILE_KW | YIELD_KW | AUTO_KW | DEFAULT_KW | EXISTENTIAL_KW | UNION_KW - | RAW_KW | MACRO_RULES_KW => true, - _ => false, - } + matches!( + self, + AS_KW + | ASYNC_KW + | AWAIT_KW + | BOX_KW + | BREAK_KW + | CONST_KW + | CONTINUE_KW + | CRATE_KW + | DYN_KW + | ELSE_KW + | ENUM_KW + | EXTERN_KW + | FALSE_KW + | FN_KW + | FOR_KW + | IF_KW + | IMPL_KW + | IN_KW + | LET_KW + | LOOP_KW + | MACRO_KW + | MATCH_KW + | MOD_KW + | MOVE_KW + | MUT_KW + | PUB_KW + | REF_KW + | RETURN_KW + | SELF_KW + | SELF_TYPE_KW + | STATIC_KW + | STRUCT_KW + | SUPER_KW + | TRAIT_KW + | TRUE_KW + | TRY_KW + | TYPE_KW + | UNSAFE_KW + | USE_KW + | WHERE_KW + | WHILE_KW + | YIELD_KW + | AUTO_KW + | DEFAULT_KW + | EXISTENTIAL_KW + | UNION_KW + | RAW_KW + | MACRO_RULES_KW + ) } pub fn is_punct(self) -> bool { - match self { - SEMICOLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK - | L_ANGLE | R_ANGLE | AT | POUND | TILDE | QUESTION | DOLLAR | AMP | PIPE | PLUS - | STAR | SLASH | CARET | PERCENT | UNDERSCORE | DOT | DOT2 | DOT3 | DOT2EQ | COLON - | COLON2 | EQ | EQ2 | FAT_ARROW | BANG | NEQ | MINUS | THIN_ARROW | LTEQ | GTEQ - | PLUSEQ | MINUSEQ | PIPEEQ | AMPEQ | CARETEQ | SLASHEQ | STAREQ | PERCENTEQ | AMP2 - | PIPE2 | SHL | SHR | SHLEQ | SHREQ => true, - _ => false, - } + matches!( + self, + SEMICOLON + | COMMA + | L_PAREN + | R_PAREN + | L_CURLY + | R_CURLY + | L_BRACK + | R_BRACK + | L_ANGLE + | R_ANGLE + | AT + | POUND + | TILDE + | QUESTION + | DOLLAR + | AMP + | PIPE + | PLUS + | STAR + | SLASH + | CARET + | PERCENT + | UNDERSCORE + | DOT + | DOT2 + | DOT3 + | DOT2EQ + | COLON + | COLON2 + | EQ + | EQ2 + | FAT_ARROW + | BANG + | 
NEQ + | MINUS + | THIN_ARROW + | LTEQ + | GTEQ + | PLUSEQ + | MINUSEQ + | PIPEEQ + | AMPEQ + | CARETEQ + | SLASHEQ + | STAREQ + | PERCENTEQ + | AMP2 + | PIPE2 + | SHL + | SHR + | SHLEQ + | SHREQ + ) } pub fn is_literal(self) -> bool { - match self { - INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true, - _ => false, - } + matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING) } pub fn from_keyword(ident: &str) -> Option { let kw = match ident { diff --git a/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast b/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast new file mode 100644 index 0000000000..b47a5a5c14 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + TYPE_ALIAS + TYPE_KW "type" + WHITESPACE " " + NAME + IDENT "F" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_TYPE + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "Start" + COLON2 "::" + PARAM_LIST + L_PAREN "(" + PARAM + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Middle" + R_PAREN ")" + WHITESPACE " " + RET_TYPE + THIN_ARROW "->" + WHITESPACE " " + PAREN_TYPE + L_PAREN "(" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Middle" + R_PAREN ")" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "End" + SEMICOLON ";" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs b/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs new file mode 100644 index 0000000000..8efd93a7ff --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0202_typepathfn_with_coloncolon.rs @@ -0,0 +1 @@ +type F = Start::(Middle) -> (Middle)::End; diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 706e1742df..92df4d70fd 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -116,6 +116,7 @@ pub(crate) struct GlobalStateSnapshot { pub(crate) semantic_tokens_cache: Arc>>, vfs: Arc)>>, pub(crate) workspaces: Arc>, + pub(crate) proc_macros_loaded: bool, } impl std::panic::UnwindSafe for GlobalStateSnapshot {} @@ -256,6 +257,7 @@ impl GlobalState { check_fixes: Arc::clone(&self.diagnostics.check_fixes), mem_docs: self.mem_docs.clone(), semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache), + proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(), } } diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index d89f0f5a3c..d9b669afbe 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -1504,7 +1504,11 @@ pub(crate) fn handle_semantic_tokens_full( let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?; + let mut highlight_config = snap.config.highlighting_config(); + // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. 
+ highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded; + + let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); // Unconditionally cache the tokens @@ -1523,7 +1527,11 @@ pub(crate) fn handle_semantic_tokens_full_delta( let text = snap.analysis.file_text(file_id)?; let line_index = snap.file_line_index(file_id)?; - let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?; + let mut highlight_config = snap.config.highlighting_config(); + // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. + highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded; + + let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); let mut cache = snap.semantic_tokens_cache.lock(); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index ceb2a6d703..f23bbca638 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -347,8 +347,8 @@ impl GlobalState { error }) }) - .collect(); - } + .collect() + }; } let watch = match files_config.watcher { diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 62aa478399..8947954354 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -343,7 +343,6 @@ Expr = | Literal | LoopExpr | MacroExpr -| MacroStmts | MatchExpr | MethodCallExpr | ParenExpr diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 63309a1552..449402e5f5 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1526,7 +1526,6 @@ pub enum Expr { Literal(Literal), LoopExpr(LoopExpr), MacroExpr(MacroExpr), - MacroStmts(MacroStmts), MatchExpr(MatchExpr), MethodCallExpr(MethodCallExpr), ParenExpr(ParenExpr), @@ -3169,10 +3168,7 @@ impl From for GenericArg { } impl AstNode for GenericArg { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true, - _ => false, - } + matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3237,12 +3233,23 @@ impl From for Type { } impl AstNode for Type { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE - | MACRO_TYPE | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE - | SLICE_TYPE | TUPLE_TYPE => true, - _ => false, - } + matches!( + kind, + ARRAY_TYPE + | DYN_TRAIT_TYPE + | FN_PTR_TYPE + | FOR_TYPE + | IMPL_TRAIT_TYPE + | INFER_TYPE + | MACRO_TYPE + | NEVER_TYPE + | PAREN_TYPE + | PATH_TYPE + | PTR_TYPE + | REF_TYPE + | SLICE_TYPE + | TUPLE_TYPE + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3334,9 +3341,6 @@ impl From for Expr { impl From for Expr { fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) } } -impl From for Expr { - fn from(node: MacroStmts) -> Expr { Expr::MacroStmts(node) } -} impl From for Expr { fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) } } @@ -3384,15 +3388,41 @@ impl From for Expr { } impl AstNode for Expr { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR - | 
CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR - | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_EXPR | MACRO_STMTS | MATCH_EXPR - | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR - | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR - | YIELD_EXPR | LET_EXPR | UNDERSCORE_EXPR => true, - _ => false, - } + matches!( + kind, + ARRAY_EXPR + | AWAIT_EXPR + | BIN_EXPR + | BLOCK_EXPR + | BOX_EXPR + | BREAK_EXPR + | CALL_EXPR + | CAST_EXPR + | CLOSURE_EXPR + | CONTINUE_EXPR + | FIELD_EXPR + | FOR_EXPR + | IF_EXPR + | INDEX_EXPR + | LITERAL + | LOOP_EXPR + | MACRO_EXPR + | MATCH_EXPR + | METHOD_CALL_EXPR + | PAREN_EXPR + | PATH_EXPR + | PREFIX_EXPR + | RANGE_EXPR + | RECORD_EXPR + | REF_EXPR + | RETURN_EXPR + | TRY_EXPR + | TUPLE_EXPR + | WHILE_EXPR + | YIELD_EXPR + | LET_EXPR + | UNDERSCORE_EXPR + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3413,7 +3443,6 @@ impl AstNode for Expr { LITERAL => Expr::Literal(Literal { syntax }), LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }), MACRO_EXPR => Expr::MacroExpr(MacroExpr { syntax }), - MACRO_STMTS => Expr::MacroStmts(MacroStmts { syntax }), MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }), METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }), PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }), @@ -3452,7 +3481,6 @@ impl AstNode for Expr { Expr::Literal(it) => &it.syntax, Expr::LoopExpr(it) => &it.syntax, Expr::MacroExpr(it) => &it.syntax, - Expr::MacroStmts(it) => &it.syntax, Expr::MatchExpr(it) => &it.syntax, Expr::MethodCallExpr(it) => &it.syntax, Expr::ParenExpr(it) => &it.syntax, @@ -3521,11 +3549,25 @@ impl From for Item { } impl AstNode for Item { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MACRO_RULES - | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true, - _ => false, - } + matches!( + kind, + CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_CALL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3629,12 +3671,25 @@ impl From for Pat { } impl AstNode for Pat { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT - | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT - | TUPLE_PAT | TUPLE_STRUCT_PAT | CONST_BLOCK_PAT => true, - _ => false, - } + matches!( + kind, + IDENT_PAT + | BOX_PAT + | REST_PAT + | LITERAL_PAT + | MACRO_PAT + | OR_PAT + | PAREN_PAT + | PATH_PAT + | WILDCARD_PAT + | RANGE_PAT + | RECORD_PAT + | REF_PAT + | SLICE_PAT + | TUPLE_PAT + | TUPLE_STRUCT_PAT + | CONST_BLOCK_PAT + ) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3686,12 +3741,7 @@ impl From for FieldList { fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) } } impl AstNode for FieldList { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, RECORD_FIELD_LIST | TUPLE_FIELD_LIST) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }), @@ -3717,12 +3767,7 @@ impl From for Adt { fn from(node: Union) 
-> Adt { Adt::Union(node) } } impl AstNode for Adt { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ENUM | STRUCT | UNION => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ENUM | STRUCT | UNION) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { ENUM => Adt::Enum(Enum { syntax }), @@ -3753,12 +3798,7 @@ impl From for AssocItem { fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) } } impl AstNode for AssocItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | FN | MACRO_CALL | TYPE_ALIAS => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CONST | FN | MACRO_CALL | TYPE_ALIAS) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { CONST => AssocItem::Const(Const { syntax }), @@ -3791,12 +3831,7 @@ impl From for ExternItem { fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) } } impl AstNode for ExternItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - FN | MACRO_CALL | STATIC | TYPE_ALIAS => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FN | MACRO_CALL | STATIC | TYPE_ALIAS) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { FN => ExternItem::Fn(Fn { syntax }), @@ -3827,10 +3862,7 @@ impl From for GenericParam { } impl AstNode for GenericParam { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true, - _ => false, - } + matches!(kind, CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM) } fn cast(syntax: SyntaxNode) -> Option { let res = match syntax.kind() { @@ -3856,12 +3888,7 @@ impl AnyHasArgList { } } impl AstNode for AnyHasArgList { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CALL_EXPR | METHOD_CALL_EXPR => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax }) } @@ -3875,76 +3902,76 @@ impl AnyHasAttrs { } impl AstNode for AnyHasAttrs { fn can_cast(kind: SyntaxKind) -> bool { - match kind { + matches!( + kind, MACRO_CALL - | SOURCE_FILE - | CONST - | ENUM - | EXTERN_BLOCK - | EXTERN_CRATE - | FN - | IMPL - | MACRO_RULES - | MACRO_DEF - | MODULE - | STATIC - | STRUCT - | TRAIT - | TYPE_ALIAS - | UNION - | USE - | ITEM_LIST - | BLOCK_EXPR - | SELF_PARAM - | PARAM - | RECORD_FIELD - | TUPLE_FIELD - | VARIANT - | ASSOC_ITEM_LIST - | EXTERN_ITEM_LIST - | CONST_PARAM - | LIFETIME_PARAM - | TYPE_PARAM - | LET_STMT - | ARRAY_EXPR - | AWAIT_EXPR - | BIN_EXPR - | BOX_EXPR - | BREAK_EXPR - | CALL_EXPR - | CAST_EXPR - | CLOSURE_EXPR - | CONTINUE_EXPR - | FIELD_EXPR - | FOR_EXPR - | IF_EXPR - | INDEX_EXPR - | LITERAL - | LOOP_EXPR - | MATCH_EXPR - | METHOD_CALL_EXPR - | PAREN_EXPR - | PATH_EXPR - | PREFIX_EXPR - | RANGE_EXPR - | REF_EXPR - | RETURN_EXPR - | TRY_EXPR - | TUPLE_EXPR - | WHILE_EXPR - | YIELD_EXPR - | LET_EXPR - | UNDERSCORE_EXPR - | STMT_LIST - | RECORD_EXPR_FIELD_LIST - | RECORD_EXPR_FIELD - | MATCH_ARM_LIST - | MATCH_ARM - | IDENT_PAT - | REST_PAT - | RECORD_PAT_FIELD => true, - _ => false, - } + | SOURCE_FILE + | CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | ITEM_LIST + | BLOCK_EXPR + | SELF_PARAM + | PARAM + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + | ASSOC_ITEM_LIST + | 
EXTERN_ITEM_LIST + | CONST_PARAM + | LIFETIME_PARAM + | TYPE_PARAM + | LET_STMT + | ARRAY_EXPR + | AWAIT_EXPR + | BIN_EXPR + | BOX_EXPR + | BREAK_EXPR + | CALL_EXPR + | CAST_EXPR + | CLOSURE_EXPR + | CONTINUE_EXPR + | FIELD_EXPR + | FOR_EXPR + | IF_EXPR + | INDEX_EXPR + | LITERAL + | LOOP_EXPR + | MATCH_EXPR + | METHOD_CALL_EXPR + | PAREN_EXPR + | PATH_EXPR + | PREFIX_EXPR + | RANGE_EXPR + | REF_EXPR + | RETURN_EXPR + | TRY_EXPR + | TUPLE_EXPR + | WHILE_EXPR + | YIELD_EXPR + | LET_EXPR + | UNDERSCORE_EXPR + | STMT_LIST + | RECORD_EXPR_FIELD_LIST + | RECORD_EXPR_FIELD + | MATCH_ARM_LIST + | MATCH_ARM + | IDENT_PAT + | REST_PAT + | RECORD_PAT_FIELD + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax }) @@ -3959,12 +3986,29 @@ impl AnyHasDocComments { } impl AstNode for AnyHasDocComments { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL - | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION - | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true, - _ => false, - } + matches!( + kind, + MACRO_CALL + | SOURCE_FILE + | CONST + | ENUM + | EXTERN_BLOCK + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax }) @@ -3979,10 +4023,7 @@ impl AnyHasGenericParams { } impl AstNode for AnyHasGenericParams { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION => true, - _ => false, - } + matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax }) @@ -3996,12 +4037,7 @@ impl AnyHasLoopBody { } } impl AstNode for AnyHasLoopBody { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - FOR_EXPR | LOOP_EXPR | WHILE_EXPR => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax }) } @@ -4014,12 +4050,7 @@ impl AnyHasModuleItem { } } impl AstNode for AnyHasModuleItem { - fn can_cast(kind: SyntaxKind) -> bool { - match kind { - MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => true, - _ => false, - } - } + fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax }) } @@ -4033,12 +4064,27 @@ impl AnyHasName { } impl AstNode for AnyHasName { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | FN | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT - | TYPE_ALIAS | UNION | RENAME | SELF_PARAM | RECORD_FIELD | VARIANT | CONST_PARAM - | TYPE_PARAM | IDENT_PAT => true, - _ => false, - } + matches!( + kind, + CONST + | ENUM + | FN + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | RENAME + | SELF_PARAM + | RECORD_FIELD + | VARIANT + | CONST_PARAM + | TYPE_PARAM + | IDENT_PAT + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax }) @@ -4053,10 +4099,10 @@ impl AnyHasTypeBounds { } impl AstNode for 
AnyHasTypeBounds { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED => true, - _ => false, - } + matches!( + kind, + ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax }) @@ -4071,13 +4117,26 @@ impl AnyHasVisibility { } impl AstNode for AnyHasVisibility { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - CONST | ENUM | EXTERN_CRATE | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC - | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => { - true - } - _ => false, - } + matches!( + kind, + CONST + | ENUM + | EXTERN_CRATE + | FN + | IMPL + | MACRO_RULES + | MACRO_DEF + | MODULE + | STATIC + | STRUCT + | TRAIT + | TYPE_ALIAS + | UNION + | USE + | RECORD_FIELD + | TUPLE_FIELD + | VARIANT + ) } fn cast(syntax: SyntaxNode) -> Option { Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax }) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 037de876d4..83f8bbac58 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -25,7 +25,7 @@ pub mod ext { return from_text(&name.text()); fn from_text(text: &str) -> ast::IdentPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn ident_path(ident: &str) -> ast::Path { @@ -60,10 +60,10 @@ pub mod ext { expr_from_text("todo!()") } pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr { - expr_from_text(&format!("{}::default()", ty)) + expr_from_text(&format!("{ty}::default()")) } pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr { - expr_from_text(&format!("{}::new()", ty)) + expr_from_text(&format!("{ty}::new()")) } pub fn zero_number() -> ast::Expr { @@ -92,18 +92,20 @@ pub mod ext { ty_path(ident_path("bool")) } pub fn ty_option(t: ast::Type) -> ast::Type { - ty_from_text(&format!("Option<{}>", t)) + ty_from_text(&format!("Option<{t}>")) } pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type { - ty_from_text(&format!("Result<{}, {}>", t, e)) + ty_from_text(&format!("Result<{t}, {e}>")) } } -pub fn name(text: &str) -> ast::Name { - ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text)) +pub fn name(name: &str) -> ast::Name { + let raw_escape = raw_ident_esc(name); + ast_from_text(&format!("mod {raw_escape}{name};")) } -pub fn name_ref(text: &str) -> ast::NameRef { - ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text)) +pub fn name_ref(name_ref: &str) -> ast::NameRef { + let raw_escape = raw_ident_esc(name_ref); + ast_from_text(&format!("fn f() {{ {raw_escape}{name_ref}; }}")) } fn raw_ident_esc(ident: &str) -> &'static str { let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some(); @@ -118,10 +120,10 @@ pub fn lifetime(text: &str) -> ast::Lifetime { let mut text = text; let tmp; if never!(!text.starts_with('\'')) { - tmp = format!("'{}", text); + tmp = format!("'{text}"); text = &tmp; } - ast_from_text(&format!("fn f<{}>() {{ }}", text)) + ast_from_text(&format!("fn f<{text}>() {{ }}")) } // FIXME: replace stringly-typed constructor with a family of typed ctors, a-la @@ -142,16 +144,16 @@ pub fn ty_tuple(types: impl IntoIterator) -> ast::Type { contents.push(','); } - ty_from_text(&format!("({})", contents)) + ty_from_text(&format!("({contents})")) } pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type { - ty_from_text(&if 
exclusive { format!("&mut {}", target) } else { format!("&{}", target) }) + ty_from_text(&if exclusive { format!("&mut {target}") } else { format!("&{target}") }) } pub fn ty_path(path: ast::Path) -> ast::Type { ty_from_text(&path.to_string()) } fn ty_from_text(text: &str) -> ast::Type { - ast_from_text(&format!("type _T = {};", text)) + ast_from_text(&format!("type _T = {text};")) } pub fn assoc_item_list() -> ast::AssocItemList { @@ -171,7 +173,7 @@ pub fn impl_( Some(params) => params.to_string(), None => String::new(), }; - ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params)) + ast_from_text(&format!("impl{params} {ty}{ty_params} {{}}")) } pub fn impl_trait( @@ -180,7 +182,7 @@ pub fn impl_trait( ty_params: Option, ) -> ast::Impl { let ty_params = ty_params.map_or_else(String::new, |params| params.to_string()); - ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params)) + ast_from_text(&format!("impl{ty_params} {trait_} for {ty}{ty_params} {{}}")) } pub(crate) fn generic_arg_list() -> ast::GenericArgList { @@ -188,13 +190,13 @@ pub(crate) fn generic_arg_list() -> ast::GenericArgList { } pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment { - ast_from_text(&format!("type __ = {};", name_ref)) + ast_from_text(&format!("type __ = {name_ref};")) } pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option) -> ast::PathSegment { let text = match trait_ref { - Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref), - None => format!("fn f(x: <{}>) {{}}", type_ref), + Some(trait_ref) => format!("fn f(x: <{type_ref} as {trait_ref}>) {{}}"), + None => format!("fn f(x: <{type_ref}>) {{}}"), }; ast_from_text(&text) } @@ -212,15 +214,15 @@ pub fn path_segment_crate() -> ast::PathSegment { } pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path { - ast_from_text(&format!("type __ = {};", segment)) + ast_from_text(&format!("type __ = {segment};")) } pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { - ast_from_text(&format!("{}::{}", qual, segment)) + ast_from_text(&format!("{qual}::{segment}")) } // FIXME: path concatenation operation doesn't make sense as AST op. 
pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path { - ast_from_text(&format!("type __ = {}::{};", first, second)) + ast_from_text(&format!("type __ = {first}::{second};")) } pub fn path_from_segments( @@ -229,20 +231,20 @@ pub fn path_from_segments( ) -> ast::Path { let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::"); ast_from_text(&if is_abs { - format!("fn f(x: ::{}) {{}}", segments) + format!("fn f(x: ::{segments}) {{}}") } else { - format!("fn f(x: {}) {{}}", segments) + format!("fn f(x: {segments}) {{}}") }) } pub fn join_paths(paths: impl IntoIterator) -> ast::Path { let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::"); - ast_from_text(&format!("type __ = {};", paths)) + ast_from_text(&format!("type __ = {paths};")) } // FIXME: should not be pub pub fn path_from_text(text: &str) -> ast::Path { - ast_from_text(&format!("fn main() {{ let test = {}; }}", text)) + ast_from_text(&format!("fn main() {{ let test = {text}; }}")) } pub fn use_tree_glob() -> ast::UseTree { @@ -257,50 +259,50 @@ pub fn use_tree( let mut buf = "use ".to_string(); buf += &path.syntax().to_string(); if let Some(use_tree_list) = use_tree_list { - format_to!(buf, "::{}", use_tree_list); + format_to!(buf, "::{use_tree_list}"); } if add_star { buf += "::*"; } if let Some(alias) = alias { - format_to!(buf, " {}", alias); + format_to!(buf, " {alias}"); } ast_from_text(&buf) } pub fn use_tree_list(use_trees: impl IntoIterator) -> ast::UseTreeList { let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", "); - ast_from_text(&format!("use {{{}}};", use_trees)) + ast_from_text(&format!("use {{{use_trees}}};")) } pub fn use_(visibility: Option, use_tree: ast::UseTree) -> ast::Use { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("{}use {};", visibility, use_tree)) + ast_from_text(&format!("{visibility}use {use_tree};")) } pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr { - ast_from_text(&format!("fn f() {{ {} {} }}", path, fields)) + ast_from_text(&format!("fn f() {{ {path} {fields} }}")) } pub fn record_expr_field_list( fields: impl IntoIterator, ) -> ast::RecordExprFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields)) + ast_from_text(&format!("fn f() {{ S {{ {fields} }} }}")) } pub fn record_expr_field(name: ast::NameRef, expr: Option) -> ast::RecordExprField { return match expr { - Some(expr) => from_text(&format!("{}: {}", name, expr)), + Some(expr) => from_text(&format!("{name}: {expr}")), None => from_text(&name.to_string()), }; fn from_text(text: &str) -> ast::RecordExprField { - ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text)) + ast_from_text(&format!("fn f() {{ S {{ {text}, }} }}")) } } @@ -311,9 +313,9 @@ pub fn record_field( ) -> ast::RecordField { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty)) + ast_from_text(&format!("struct S {{ {visibility}{name}: {ty}, }}")) } // TODO @@ -323,13 +325,13 @@ pub fn block_expr( ) -> ast::BlockExpr { let mut buf = "{\n".to_string(); for stmt in stmts.into_iter() { - format_to!(buf, " {}\n", stmt); + format_to!(buf, " {stmt}\n"); } if let Some(tail_expr) = tail_expr { - format_to!(buf, " {}\n", tail_expr); + format_to!(buf, " 
{tail_expr}\n"); } buf += "}"; - ast_from_text(&format!("fn f() {}", buf)) + ast_from_text(&format!("fn f() {buf}")) } /// Ideally this function wouldn't exist since it involves manual indenting. @@ -343,18 +345,18 @@ pub fn hacky_block_expr_with_comments( let mut buf = "{\n".to_string(); for node_or_token in elements.into_iter() { match node_or_token { - rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n), + rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => { - format_to!(buf, " {}\n", t) + format_to!(buf, " {t}\n") } _ => (), } } if let Some(tail_expr) = tail_expr { - format_to!(buf, " {}\n", tail_expr); + format_to!(buf, " {tail_expr}\n"); } buf += "}"; - ast_from_text(&format!("fn f() {}", buf)) + ast_from_text(&format!("fn f() {buf}")) } pub fn expr_unit() -> ast::Expr { @@ -362,7 +364,7 @@ pub fn expr_unit() -> ast::Expr { } pub fn expr_literal(text: &str) -> ast::Literal { assert_eq!(text.trim(), text); - ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)) + ast_from_text(&format!("fn f() {{ let _ = {text}; }}")) } pub fn expr_empty_block() -> ast::Expr { @@ -373,41 +375,41 @@ pub fn expr_path(path: ast::Path) -> ast::Expr { } pub fn expr_continue(label: Option) -> ast::Expr { match label { - Some(label) => expr_from_text(&format!("continue {}", label)), + Some(label) => expr_from_text(&format!("continue {label}")), None => expr_from_text("continue"), } } // Consider `op: SyntaxKind` instead for nicer syntax at the call-site? pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{} {} {}", lhs, op, rhs)) + expr_from_text(&format!("{lhs} {op} {rhs}")) } pub fn expr_break(label: Option, expr: Option) -> ast::Expr { let mut s = String::from("break"); if let Some(label) = label { - format_to!(s, " {}", label); + format_to!(s, " {label}"); } if let Some(expr) = expr { - format_to!(s, " {}", expr); + format_to!(s, " {expr}"); } expr_from_text(&s) } pub fn expr_return(expr: Option) -> ast::Expr { match expr { - Some(expr) => expr_from_text(&format!("return {}", expr)), + Some(expr) => expr_from_text(&format!("return {expr}")), None => expr_from_text("return"), } } pub fn expr_try(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{}?", expr)) + expr_from_text(&format!("{expr}?")) } pub fn expr_await(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{}.await", expr)) + expr_from_text(&format!("{expr}.await")) } pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr { - expr_from_text(&format!("match {} {}", expr, match_arm_list)) + expr_from_text(&format!("match {expr} {match_arm_list}")) } pub fn expr_if( condition: ast::Expr, @@ -415,66 +417,67 @@ pub fn expr_if( else_branch: Option, ) -> ast::Expr { let else_branch = match else_branch { - Some(ast::ElseBranch::Block(block)) => format!("else {}", block), - Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr), + Some(ast::ElseBranch::Block(block)) => format!("else {block}"), + Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {if_expr}"), None => String::new(), }; - expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch)) + expr_from_text(&format!("if {condition} {then_branch} {else_branch}")) } pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr { - expr_from_text(&format!("for {} in {} {}", pat, expr, block)) + expr_from_text(&format!("for {pat} in {expr} {block}")) } pub 
fn expr_loop(block: ast::BlockExpr) -> ast::Expr { - expr_from_text(&format!("loop {}", block)) + expr_from_text(&format!("loop {block}")) } pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr { let token = token(op); - expr_from_text(&format!("{}{}", token, expr)) + expr_from_text(&format!("{token}{expr}")) } pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr { - expr_from_text(&format!("{}{}", f, arg_list)) + expr_from_text(&format!("{f}{arg_list}")) } pub fn expr_method_call( receiver: ast::Expr, method: ast::NameRef, arg_list: ast::ArgList, ) -> ast::Expr { - expr_from_text(&format!("{}.{}{}", receiver, method, arg_list)) + expr_from_text(&format!("{receiver}.{method}{arg_list}")) } pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr { - expr_from_text(&format!("{}!{}", f, arg_list)) + expr_from_text(&format!("{f}!{arg_list}")) } pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr { - expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) }) + expr_from_text(&if exclusive { format!("&mut {expr}") } else { format!("&{expr}") }) } pub fn expr_closure(pats: impl IntoIterator, expr: ast::Expr) -> ast::Expr { let params = pats.into_iter().join(", "); - expr_from_text(&format!("|{}| {}", params, expr)) + expr_from_text(&format!("|{params}| {expr}")) } pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr { - expr_from_text(&format!("{}.{}", receiver, field)) + expr_from_text(&format!("{receiver}.{field}")) } pub fn expr_paren(expr: ast::Expr) -> ast::Expr { - expr_from_text(&format!("({})", expr)) + expr_from_text(&format!("({expr})")) } pub fn expr_tuple(elements: impl IntoIterator) -> ast::Expr { let expr = elements.into_iter().format(", "); - expr_from_text(&format!("({})", expr)) + expr_from_text(&format!("({expr})")) } pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr { - expr_from_text(&format!("{} = {}", lhs, rhs)) + expr_from_text(&format!("{lhs} = {rhs}")) } fn expr_from_text(text: &str) -> ast::Expr { - ast_from_text(&format!("const C: () = {};", text)) + ast_from_text(&format!("const C: () = {text};")) } pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr { - ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) + ast_from_text(&format!("const _: () = while let {pattern} = {expr} {{}};")) } pub fn arg_list(args: impl IntoIterator) -> ast::ArgList { - ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", "))) + let args = args.into_iter().format(", "); + ast_from_text(&format!("fn main() {{ ()({args}) }}")) } pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat { @@ -485,7 +488,7 @@ pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat { if mut_ { s.push_str("mut "); } - format_to!(s, "{}", name); + format_to!(s, "{name}"); s.push_str(": ())"); ast_from_text(&s) } @@ -494,7 +497,7 @@ pub fn wildcard_pat() -> ast::WildcardPat { return from_text("_"); fn from_text(text: &str) -> ast::WildcardPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -502,7 +505,7 @@ pub fn literal_pat(lit: &str) -> ast::LiteralPat { return from_text(lit); fn from_text(text: &str) -> ast::LiteralPat { - ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text)) + ast_from_text(&format!("fn f() {{ match x {{ {text} => {{}} }} }}")) } } @@ -515,10 +518,10 @@ pub fn tuple_pat(pats: impl IntoIterator) -> ast::TuplePat { 
if count == 1 { pats_str.push(','); } - return from_text(&format!("({})", pats_str)); + return from_text(&format!("({pats_str})")); fn from_text(text: &str) -> ast::TuplePat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -527,46 +530,46 @@ pub fn tuple_struct_pat( pats: impl IntoIterator, ) -> ast::TupleStructPat { let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{}({})", path, pats_str)); + return from_text(&format!("{path}({pats_str})")); fn from_text(text: &str) -> ast::TupleStructPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn record_pat(path: ast::Path, pats: impl IntoIterator) -> ast::RecordPat { let pats_str = pats.into_iter().join(", "); - return from_text(&format!("{} {{ {} }}", path, pats_str)); + return from_text(&format!("{path} {{ {pats_str} }}")); fn from_text(text: &str) -> ast::RecordPat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat { - ast_from_text(&format!("fn f({} {}: ()))", path, fields)) + ast_from_text(&format!("fn f({path} {fields}: ()))")) } pub fn record_pat_field_list( fields: impl IntoIterator, ) -> ast::RecordPatFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields)) + ast_from_text(&format!("fn f(S {{ {fields} }}: ()))")) } pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField { - ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat)) + ast_from_text(&format!("fn f(S {{ {name_ref}: {pat} }}: ()))")) } pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField { - ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref)) + ast_from_text(&format!("fn f(S {{ {name_ref} }}: ()))")) } /// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise. 
pub fn path_pat(path: ast::Path) -> ast::Pat { return from_text(&path.to_string()); fn from_text(text: &str) -> ast::Pat { - ast_from_text(&format!("fn f({}: ())", text)) + ast_from_text(&format!("fn f({text}: ())")) } } @@ -577,12 +580,12 @@ pub fn match_arm( ) -> ast::MatchArm { let pats_str = pats.into_iter().join(" | "); return match guard { - Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)), - None => from_text(&format!("{} => {}", pats_str, expr)), + Some(guard) => from_text(&format!("{pats_str} if {guard} => {expr}")), + None => from_text(&format!("{pats_str} => {expr}")), }; fn from_text(text: &str) -> ast::MatchArm { - ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{{text}}} }}")) } } @@ -592,10 +595,10 @@ pub fn match_arm_with_guard( expr: ast::Expr, ) -> ast::MatchArm { let pats_str = pats.into_iter().join(" | "); - return from_text(&format!("{} if {} => {}", pats_str, guard, expr)); + return from_text(&format!("{pats_str} if {guard} => {expr}")); fn from_text(text: &str) -> ast::MatchArm { - ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{{text}}} }}")) } } @@ -605,13 +608,14 @@ pub fn match_arm_list(arms: impl IntoIterator) -> ast::Mat .map(|arm| { let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like()); let comma = if needs_comma { "," } else { "" }; - format!(" {}{}\n", arm.syntax(), comma) + let arm = arm.syntax(); + format!(" {arm}{comma}\n") }) .collect::(); return from_text(&arms_str); fn from_text(text: &str) -> ast::MatchArmList { - ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) + ast_from_text(&format!("fn f() {{ match () {{\n{text}}} }}")) } } @@ -620,10 +624,10 @@ pub fn where_pred( bounds: impl IntoIterator, ) -> ast::WherePred { let bounds = bounds.into_iter().join(" + "); - return from_text(&format!("{}: {}", path, bounds)); + return from_text(&format!("{path}: {bounds}")); fn from_text(text: &str) -> ast::WherePred { - ast_from_text(&format!("fn f() where {} {{ }}", text)) + ast_from_text(&format!("fn f() where {text} {{ }}")) } } @@ -632,7 +636,7 @@ pub fn where_clause(preds: impl IntoIterator) -> ast::Whe return from_text(preds.as_str()); fn from_text(text: &str) -> ast::WhereClause { - ast_from_text(&format!("fn f() where {} {{ }}", text)) + ast_from_text(&format!("fn f() where {text} {{ }}")) } } @@ -642,19 +646,19 @@ pub fn let_stmt( initializer: Option, ) -> ast::LetStmt { let mut text = String::new(); - format_to!(text, "let {}", pattern); + format_to!(text, "let {pattern}"); if let Some(ty) = ty { - format_to!(text, ": {}", ty); + format_to!(text, ": {ty}"); } match initializer { - Some(it) => format_to!(text, " = {};", it), + Some(it) => format_to!(text, " = {it};"), None => format_to!(text, ";"), }; - ast_from_text(&format!("fn f() {{ {} }}", text)) + ast_from_text(&format!("fn f() {{ {text} }}")) } pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt { let semi = if expr.is_block_like() { "" } else { ";" }; - ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi)) + ast_from_text(&format!("fn f() {{ {expr}{semi} (); }}")) } pub fn item_const( @@ -665,13 +669,13 @@ pub fn item_const( ) -> ast::Const { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr)) + ast_from_text(&format!("{visibility} const {name}: {ty} = {expr};")) 
} pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param { - ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty)) + ast_from_text(&format!("fn f({pat}: {ty}) {{ }}")) } pub fn self_param() -> ast::SelfParam { @@ -679,7 +683,7 @@ pub fn self_param() -> ast::SelfParam { } pub fn ret_type(ty: ast::Type) -> ast::RetType { - ast_from_text(&format!("fn f() -> {} {{ }}", ty)) + ast_from_text(&format!("fn f() -> {ty} {{ }}")) } pub fn param_list( @@ -688,30 +692,30 @@ pub fn param_list( ) -> ast::ParamList { let args = pats.into_iter().join(", "); let list = match self_param { - Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param), - Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args), - None => format!("fn f({}) {{ }}", args), + Some(self_param) if args.is_empty() => format!("fn f({self_param}) {{ }}"), + Some(self_param) => format!("fn f({self_param}, {args}) {{ }}"), + None => format!("fn f({args}) {{ }}"), }; ast_from_text(&list) } pub fn type_param(name: ast::Name, ty: Option) -> ast::TypeParam { let bound = match ty { - Some(it) => format!(": {}", it), + Some(it) => format!(": {it}"), None => String::new(), }; - ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound)) + ast_from_text(&format!("fn f<{name}{bound}>() {{ }}")) } pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam { - ast_from_text(&format!("fn f<{}>() {{ }}", lifetime)) + ast_from_text(&format!("fn f<{lifetime}>() {{ }}")) } pub fn generic_param_list( pats: impl IntoIterator, ) -> ast::GenericParamList { let args = pats.into_iter().join(", "); - ast_from_text(&format!("fn f<{}>() {{ }}", args)) + ast_from_text(&format!("fn f<{args}>() {{ }}")) } pub fn visibility_pub_crate() -> ast::Visibility { @@ -724,33 +728,33 @@ pub fn visibility_pub() -> ast::Visibility { pub fn tuple_field_list(fields: impl IntoIterator) -> ast::TupleFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("struct f({});", fields)) + ast_from_text(&format!("struct f({fields});")) } pub fn record_field_list( fields: impl IntoIterator, ) -> ast::RecordFieldList { let fields = fields.into_iter().join(", "); - ast_from_text(&format!("struct f {{ {} }}", fields)) + ast_from_text(&format!("struct f {{ {fields} }}")) } pub fn tuple_field(visibility: Option, ty: ast::Type) -> ast::TupleField { let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!("struct f({}{});", visibility, ty)) + ast_from_text(&format!("struct f({visibility}{ty});")) } pub fn variant(name: ast::Name, field_list: Option) -> ast::Variant { let field_list = match field_list { None => String::new(), Some(it) => match it { - ast::FieldList::RecordFieldList(record) => format!(" {}", record), - ast::FieldList::TupleFieldList(tuple) => format!("{}", tuple), + ast::FieldList::RecordFieldList(record) => format!(" {record}"), + ast::FieldList::TupleFieldList(tuple) => format!("{tuple}"), }, }; - ast_from_text(&format!("enum f {{ {}{} }}", name, field_list)) + ast_from_text(&format!("enum f {{ {name}{field_list} }}")) } pub fn fn_( @@ -763,23 +767,22 @@ pub fn fn_( is_async: bool, ) -> ast::Fn { let type_params = match type_params { - Some(type_params) => format!("{}", type_params), + Some(type_params) => format!("{type_params}"), None => "".into(), }; let ret_type = match ret_type { - Some(ret_type) => format!("{} ", ret_type), + Some(ret_type) => format!("{ret_type} "), None => "".into(), }; let visibility = match 
visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; let async_literal = if is_async { "async " } else { "" }; ast_from_text(&format!( - "{}{}fn {}{}{} {}{}", - visibility, async_literal, fn_name, type_params, params, ret_type, body + "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{body}", )) } @@ -793,13 +796,10 @@ pub fn struct_( let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string()); let visibility = match visibility { None => String::new(), - Some(it) => format!("{} ", it), + Some(it) => format!("{it} "), }; - ast_from_text(&format!( - "{}struct {}{}{}{}", - visibility, strukt_name, type_params, field_list, semicolon - )) + ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",)) } #[track_caller] @@ -808,7 +808,8 @@ fn ast_from_text<N: AstNode>(text: &str) -> N { let node = match parse.tree().syntax().descendants().find_map(N::cast) { Some(it) => it, None => { - panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text) + let node = std::any::type_name::<N>(); + panic!("Failed to make ast node `{node}` from text {text}") } }; let node = node.clone_subtree(); @@ -824,7 +825,7 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken { .descendants_with_tokens() .filter_map(|it| it.into_token()) .find(|it| it.kind() == kind) - .unwrap_or_else(|| panic!("unhandled token: {:?}", kind)) + .unwrap_or_else(|| panic!("unhandled token: {kind:?}")) } pub mod tokens { @@ -863,7 +864,7 @@ pub mod tokens { pub fn literal(text: &str) -> SyntaxToken { assert_eq!(text.trim(), text); - let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)); + let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {text}; }}")); lit.syntax().first_child_or_token().unwrap().into_token().unwrap() } diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 28976d837b..ba72e64425 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -322,7 +322,7 @@ impl ast::IntNumber { pub fn float_value(&self) -> Option<f64> { let (_, text, _) = self.split_into_parts(); - text.parse::<f64>().ok() + text.replace('_', "").parse::<f64>().ok() } } @@ -361,7 +361,7 @@ impl ast::FloatNumber { pub fn value(&self) -> Option<f64> { let (text, _) = self.split_into_parts(); - text.parse::<f64>().ok() + text.replace('_', "").parse::<f64>().ok() } } @@ -397,6 +397,15 @@ mod tests { assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into()); } + fn check_float_value(lit: &str, expected: impl Into<Option<f64>> + Copy) { + assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.value(), expected.into()); + assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.float_value(), expected.into()); + } + + fn check_int_value(lit: &str, expected: impl Into<Option<u128>>) { + assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.value(), expected.into()); + } + #[test] fn test_float_number_suffix() { check_float_suffix("123.0", None); @@ -437,6 +446,14 @@ mod tests { check_string_value(r"\nfoobar", "\nfoobar"); check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\"); } + + #[test] + fn test_value_underscores() { + check_float_value("3.141592653589793_f64", 3.141592653589793_f64); + check_float_value("1__0.__0__f32", 10.0); + check_int_value("0b__1_0_", 2); + check_int_value("1_1_1_1_1_1", 111111); + } } impl ast::Char { diff --git a/crates/syntax/src/tests/sourcegen_ast.rs 
b/crates/syntax/src/tests/sourcegen_ast.rs index 6d27662251..70b54843db 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -169,10 +169,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { quote! { impl AstNode for #name { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - #(#kinds)|* => true, - _ => false, - } + matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { @@ -253,10 +250,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { } impl AstNode for #name { fn can_cast(kind: SyntaxKind) -> bool { - match kind { - #(#kinds)|* => true, - _ => false, - } + matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option<Self> { Self::can_cast(syntax.kind()).then(|| #name { syntax }) @@ -410,24 +404,17 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String { impl SyntaxKind { pub fn is_keyword(self) -> bool { - match self { - #(#all_keywords)|* => true, - _ => false, - } + matches!(self, #(#all_keywords)|*) } pub fn is_punct(self) -> bool { - match self { - #(#punctuation)|* => true, - _ => false, - } + + matches!(self, #(#punctuation)|*) + } pub fn is_literal(self) -> bool { - match self { - #(#literals)|* => true, - _ => false, - } + matches!(self, #(#literals)|*) } pub fn from_keyword(ident: &str) -> Option<SyntaxKind> { diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs index 50e8d06b66..ccaaf39917 100644 --- a/lib/la-arena/src/lib.rs +++ b/lib/la-arena/src/lib.rs @@ -322,6 +322,43 @@ impl<T> Arena<T> { .map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value)) } + /// Returns an iterator over the arena’s values. + /// + /// ``` + /// let mut arena = la_arena::Arena::new(); + /// let idx1 = arena.alloc(20); + /// let idx2 = arena.alloc(40); + /// let idx3 = arena.alloc(60); + /// + /// let mut iterator = arena.values(); + /// assert_eq!(iterator.next(), Some(&20)); + /// assert_eq!(iterator.next(), Some(&40)); + /// assert_eq!(iterator.next(), Some(&60)); + /// ``` + pub fn values(&mut self) -> impl Iterator<Item = &T> + ExactSizeIterator + DoubleEndedIterator { + self.data.iter() + } + + /// Returns an iterator over the arena’s mutable values. + /// + /// ``` + /// let mut arena = la_arena::Arena::new(); + /// let idx1 = arena.alloc(20); + /// + /// assert_eq!(arena[idx1], 20); + /// + /// let mut iterator = arena.values_mut(); + /// *iterator.next().unwrap() = 10; + /// drop(iterator); + /// + /// assert_eq!(arena[idx1], 10); + /// ``` + pub fn values_mut( + &mut self, + ) -> impl Iterator<Item = &mut T> + ExactSizeIterator + DoubleEndedIterator { + self.data.iter_mut() + } + /// Reallocates the arena to make it take up as little space as possible. pub fn shrink_to_fit(&mut self) { self.data.shrink_to_fit();
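
Note: the new `Arena::values`/`Arena::values_mut` accessors added in the la-arena hunk above can replace hand-rolled `arena.iter().map(|(_idx, value)| value)` chains. A minimal usage sketch follows; it is not part of the patch, the helper names are illustrative, and `values` takes `&mut self` exactly as in the hunk above:

    use la_arena::Arena;

    // Double every stored value in place via the new `values_mut` iterator;
    // indices are untouched, only the values are visited.
    fn double_all(arena: &mut Arena<u32>) {
        for value in arena.values_mut() {
            *value *= 2;
        }
    }

    // Sum the stored values; `values` needs a mutable arena in this version of the API.
    fn sum(arena: &mut Arena<u32>) -> u32 {
        arena.values().copied().sum()
    }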