Merge branch 'main' into micha/python-extension-environment

This commit is contained in:
Dhruv Manilawala 2025-07-02 14:42:06 +05:30
commit 8dee3656ae
212 changed files with 8014 additions and 2540 deletions

View file

@ -49,7 +49,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build sdist"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
command: sdist
args: --out dist
@ -79,7 +79,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - x86_64"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: x86_64
args: --release --locked --out dist
@ -121,7 +121,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels - aarch64"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: aarch64
args: --release --locked --out dist
@ -177,7 +177,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
args: --release --locked --out dist
@ -230,7 +230,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.target }}
manylinux: auto
@ -304,7 +304,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
manylinux: auto
@ -370,7 +370,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
@ -435,7 +435,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2

View file

@ -460,7 +460,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary
@ -661,7 +661,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: Fuzz
env:
FORCE_COLOR: 1
@ -712,7 +712,7 @@ jobs:
- name: "Prep README.md"
run: python scripts/transform_readme.py --target pypi
- name: "Build wheels"
uses: PyO3/maturin-action@35be3186fc8e037e329f06b68dcd807d83dcc6dc # v1.49.2
uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
with:
args: --out dist
- name: "Test wheel"
@ -731,7 +731,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
@ -774,7 +774,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system
@ -906,7 +906,7 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
@ -939,7 +939,7 @@ jobs:
persist-credentials: false
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show

View file

@ -34,7 +34,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"

View file

@ -37,7 +37,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:
@ -48,6 +48,8 @@ jobs:
- name: Run mypy_primer
shell: bash
env:
TY_MEMORY_REPORT: mypy_primer
run: |
cd ruff

View file

@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*

View file

@ -32,7 +32,7 @@ jobs:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@445689ea25e0de0a23313031f5fe577c74ae45a1 # v6.3.0
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:

View file

@ -81,7 +81,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.13
rev: v0.12.1
hooks:
- id: ruff-format
- id: ruff
@ -91,7 +91,7 @@ repos:
# Prettier
- repo: https://github.com/rbubley/mirrors-prettier
rev: v3.5.3
rev: v3.6.2
hooks:
- id: prettier
types: [yaml]
@ -99,12 +99,12 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.9.0
rev: v1.10.0
hooks:
- id: zizmor
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.0
rev: 0.33.1
hooks:
- id: check-github-workflows

86
Cargo.lock generated
View file

@ -132,6 +132,15 @@ version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
[[package]]
name = "approx"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
dependencies = [
"num-traits",
]
[[package]]
name = "arc-swap"
version = "1.7.1"
@ -211,6 +220,15 @@ version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "bincode"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
dependencies = [
"serde",
]
[[package]]
name = "bincode"
version = "2.0.1"
@ -449,9 +467,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
[[package]]
name = "clearscreen"
version = "4.0.1"
version = "4.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c41dc435a7b98e4608224bbf65282309f5403719df9113621b30f8b6f74e2f4"
checksum = "85a8ab73a1c02b0c15597b22e09c7dc36e63b2f601f9d1e83ac0c3decd38b1ae"
dependencies = [
"nix 0.29.0",
"terminfo",
@ -462,22 +480,27 @@ dependencies = [
[[package]]
name = "codspeed"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c"
checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf"
dependencies = [
"anyhow",
"bincode 1.3.3",
"colored 2.2.0",
"glob",
"libc",
"nix 0.29.0",
"serde",
"serde_json",
"statrs",
"uuid",
]
[[package]]
name = "codspeed-criterion-compat"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725"
checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a"
dependencies = [
"codspeed",
"codspeed-criterion-compat-walltime",
@ -486,9 +509,9 @@ dependencies = [
[[package]]
name = "codspeed-criterion-compat-walltime"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb"
checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64"
dependencies = [
"anes",
"cast",
@ -511,9 +534,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8620a09dfaf37b3c45f982c4b65bd8f9b0203944da3ffa705c0fcae6b84655ff"
checksum = "3acf1d6fe367c2ff5ff136ca723f678490c3691d59d7f2b83d5e53b7b25ac91e"
dependencies = [
"codspeed",
"codspeed-divan-compat-macros",
@ -522,9 +545,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat-macros"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30fe872bc4214626b35d3a1706a905d0243503bb6ba3bb7be2fc59083d5d680c"
checksum = "bcfa2013d7bee54a497d0e1410751d5de690fd67a3e9eb728ca049b6a3d16d0b"
dependencies = [
"divan-macros",
"itertools 0.14.0",
@ -536,9 +559,9 @@ dependencies = [
[[package]]
name = "codspeed-divan-compat-walltime"
version = "2.10.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "104caa97b36d4092d89e24e4b103b40ede1edab03c0372d19e14a33f9393132b"
checksum = "e513100fb0e7ba02fb3824546ecd2abfb8f334262f0972225b463aad07f99ff0"
dependencies = [
"cfg-if",
"clap",
@ -1116,9 +1139,9 @@ dependencies = [
[[package]]
name = "get-size-derive2"
version = "0.5.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea12180b12b82e9b7c01dfe91138208604961bb2bd7e93058d6786e5d770b104"
checksum = "1aac2af9f9a6a50e31b1e541d05b7925add83d3982c2793193fe9d4ee584323c"
dependencies = [
"attribute-derive",
"quote",
@ -1127,9 +1150,9 @@ dependencies = [
[[package]]
name = "get-size2"
version = "0.5.0"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a37e4d438f7550dbd4938f1bcde41538653616513678d647665a7332ea3c030"
checksum = "624a0312efd19e1c45922dfcc2d6806d3ffc4bca261f89f31fcc4f63f438d885"
dependencies = [
"compact_str",
"get-size-derive2",
@ -1447,9 +1470,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.9.0"
version = "2.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
dependencies = [
"equivalent",
"hashbrown 0.15.4",
@ -2080,9 +2103,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordermap"
version = "0.5.7"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d31b8b7a99f71bdff4235faf9ce9eada0ad3562c8fbeb7d607d9f41a6ec569d"
checksum = "6d6bff06e4a5dc6416bead102d3e63c480dd852ffbb278bf8cfeb4966b329609"
dependencies = [
"indexmap",
"serde",
@ -2696,13 +2719,14 @@ dependencies = [
"anyhow",
"argfile",
"assert_fs",
"bincode",
"bincode 2.0.1",
"bitflags 2.9.1",
"cachedir",
"clap",
"clap_complete_command",
"clearscreen",
"colored 3.0.0",
"dunce",
"filetime",
"globwalk",
"ignore",
@ -2946,6 +2970,7 @@ dependencies = [
"fern",
"glob",
"globset",
"hashbrown 0.15.4",
"imperative",
"insta",
"is-macro",
@ -3669,6 +3694,16 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "statrs"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a3fe7c28c6512e766b0874335db33c94ad7b8f9054228ae1c2abd47ce7d335e"
dependencies = [
"approx",
"num-traits",
]
[[package]]
name = "strip-ansi-escapes"
version = "0.2.1"
@ -4705,11 +4740,10 @@ dependencies = [
[[package]]
name = "which"
version = "7.0.3"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762"
checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d"
dependencies = [
"either",
"env_home",
"rustix",
"winsafe",

View file

@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.85"
rust-version = "1.86"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@ -62,8 +62,8 @@ camino = { version = "1.1.7" }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "4.0.0" }
divan = { package = "codspeed-divan-compat", version = "2.10.1" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
divan = { package = "codspeed-divan-compat", version = "3.0.2" }
codspeed-criterion-compat = { version = "3.0.2", default-features = false }
colored = { version = "3.0.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
@ -227,6 +227,7 @@ unnecessary_debug_formatting = "allow" # too many instances, the display also d
# Without the hashes we run into a `rustfmt` bug in some snapshot tests, see #13250
needless_raw_string_hashes = "allow"
# Disallowed restriction lints
ignore_without_reason = "allow" # Too many exsisting instances, and there's no auto fix.
print_stdout = "warn"
print_stderr = "warn"
dbg_macro = "warn"

View file

@ -423,6 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Albumentations](https://github.com/albumentations-team/albumentations)
- Amazon ([AWS SAM](https://github.com/aws/serverless-application-model))
- [Anki](https://apps.ankiweb.net/)
- Anthropic ([Python SDK](https://github.com/anthropics/anthropic-sdk-python))
- [Apache Airflow](https://github.com/apache/airflow)
- AstraZeneca ([Magnus](https://github.com/AstraZeneca/magnus-core))

View file

@ -68,6 +68,7 @@ ruff_linter = { workspace = true, features = ["clap", "test-rules"] }
assert_fs = { workspace = true }
# Avoid writing colored snapshots when running tests from the terminal
colored = { workspace = true, features = ["no-color"] }
dunce = { workspace = true }
indoc = { workspace = true }
insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { workspace = true }

View file

@ -6,7 +6,6 @@ use anyhow::Result;
use bitflags::bitflags;
use colored::Colorize;
use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use ruff_linter::linter::FixTable;
use serde::Serialize;
@ -15,7 +14,7 @@ use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, OldDiagnostic, PylintEmitter, RdjsonEmitter,
SarifEmitter, TextEmitter,
SarifEmitter, SecondaryCode, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::settings::flags::{self};
@ -36,8 +35,8 @@ bitflags! {
}
#[derive(Serialize)]
struct ExpandedStatistics {
code: Option<NoqaCode>,
struct ExpandedStatistics<'a> {
code: Option<&'a SecondaryCode>,
name: &'static str,
count: usize,
fixable: bool,
@ -303,11 +302,12 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics
.inner
.iter()
.map(|message| (message.noqa_code(), message))
.map(|message| (message.secondary_code(), message))
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
|mut acc: Vec<((Option<NoqaCode>, &OldDiagnostic), usize)>, (code, message)| {
|mut acc: Vec<((Option<&SecondaryCode>, &OldDiagnostic), usize)>,
(code, message)| {
if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
if *prev_code == code {
*count += 1;
@ -349,12 +349,7 @@ impl Printer {
);
let code_width = statistics
.iter()
.map(|statistic| {
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.len()
})
.map(|statistic| statistic.code.map_or(0, |s| s.len()))
.max()
.unwrap();
let any_fixable = statistics.iter().any(|statistic| statistic.fixable);
@ -370,7 +365,8 @@ impl Printer {
statistic.count.to_string().bold(),
statistic
.code
.map_or_else(String::new, |rule| rule.to_string())
.map(SecondaryCode::as_str)
.unwrap_or_default()
.red()
.bold(),
if any_fixable {

View file

@ -612,7 +612,7 @@ fn extend_passed_via_config_argument() {
#[test]
fn nonexistent_extend_file() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
fs::write(
project_dir.join("ruff.toml"),
r#"
@ -653,7 +653,7 @@ extend = "ruff3.toml"
#[test]
fn circular_extend() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = tempdir.path().canonicalize()?;
let project_path = dunce::canonicalize(tempdir.path())?;
fs::write(
project_path.join("ruff.toml"),
@ -698,7 +698,7 @@ extend = "ruff.toml"
#[test]
fn parse_error_extends() -> Result<()> {
let tempdir = TempDir::new()?;
let project_path = tempdir.path().canonicalize()?;
let project_path = dunce::canonicalize(tempdir.path())?;
fs::write(
project_path.join("ruff.toml"),
@ -2130,7 +2130,7 @@ select = ["UP006"]
#[test]
fn requires_python_no_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2441,7 +2441,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_target_version_override() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2752,7 +2752,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_no_tool_with_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("pyproject.toml");
fs::write(
&ruff_toml,
@ -2797,7 +2797,7 @@ requires-python = ">= 3.11"
#[test]
fn requires_python_ruff_toml_no_target_fallback() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3118,7 +3118,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_no_target_fallback_check() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3173,7 +3173,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@ -3200,7 +3200,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"(?m)^foo\\test","foo/test")]
@ -3499,7 +3499,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_pyproject_toml_above_with_tool() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let outer_pyproject = tempdir.path().join("pyproject.toml");
fs::write(
&outer_pyproject,
@ -3528,7 +3528,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/"),(r"foo\\","foo/")]
@ -3827,7 +3827,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_ruff_toml_above() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -3856,7 +3856,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/foo/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]
@ -4441,7 +4441,7 @@ from typing import Union;foo: Union[int, str] = 1
#[test]
fn requires_python_extend_from_shared_config() -> Result<()> {
let tempdir = TempDir::new()?;
let project_dir = tempdir.path().canonicalize()?;
let project_dir = dunce::canonicalize(tempdir.path())?;
let ruff_toml = tempdir.path().join("ruff.toml");
fs::write(
&ruff_toml,
@ -4479,7 +4479,7 @@ from typing import Union;foo: Union[int, str] = 1
"#,
)?;
let testpy_canon = testpy.canonicalize()?;
let testpy_canon = dunce::canonicalize(testpy)?;
insta::with_settings!({
filters => vec![(tempdir_filter(&testpy_canon).as_str(), "[TMP]/test.py"),(tempdir_filter(&project_dir).as_str(), "[TMP]/")]

View file

@ -12,10 +12,8 @@ fn display_default_settings() -> anyhow::Result<()> {
// Tempdir path's on macos are symlinks, which doesn't play nicely with
// our snapshot filtering.
let project_dir = tempdir
.path()
.canonicalize()
.context("Failed to canonical tempdir path.")?;
let project_dir =
dunce::canonicalize(tempdir.path()).context("Failed to canonical tempdir path.")?;
std::fs::write(
project_dir.join("pyproject.toml"),

View file

@ -821,11 +821,7 @@ impl DisplaySourceAnnotation<'_> {
// Length of this annotation as displayed in the stderr output
fn len(&self) -> usize {
// Account for usize underflows
if self.range.1 > self.range.0 {
self.range.1 - self.range.0
} else {
self.range.0 - self.range.1
}
self.range.1.abs_diff(self.range.0)
}
fn takes_space(&self) -> bool {

View file

@ -428,10 +428,6 @@ fn benchmark_complex_constrained_attributes_2(criterion: &mut Criterion) {
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
if isinstance(self.b, str):
return
"#,
)
},

View file

@ -637,6 +637,22 @@ pub trait FileResolver {
fn input(&self, file: File) -> Input;
}
impl<T> FileResolver for T
where
T: Db,
{
fn path(&self, file: File) -> &str {
relativize_path(self.system().current_directory(), file.path(self).as_str())
}
fn input(&self, file: File) -> Input {
Input {
text: source_text(self, file),
line_index: line_index(self, file),
}
}
}
impl FileResolver for &dyn Db {
fn path(&self, file: File) -> &str {
relativize_path(self.system().current_directory(), file.path(*self).as_str())
@ -708,7 +724,6 @@ fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
#[cfg(test)]
mod tests {
use crate::Upcast;
use crate::diagnostic::{Annotation, DiagnosticId, Severity, Span};
use crate::files::system_path_to_file;
use crate::system::{DbWithWritableSystem, SystemPath};
@ -2221,7 +2236,7 @@ watermelon
///
/// (This will set the "printed" flag on `Diagnostic`.)
fn render(&self, diag: &Diagnostic) -> String {
diag.display(&self.db.upcast(), &self.config).to_string()
diag.display(&self.db, &self.config).to_string()
}
}

View file

@ -36,12 +36,6 @@ pub trait Db: salsa::Database {
fn python_version(&self) -> PythonVersion;
}
/// Trait for upcasting a reference to a base trait object.
pub trait Upcast<T: ?Sized> {
fn upcast(&self) -> &T;
fn upcast_mut(&mut self) -> &mut T;
}
/// Returns the maximum number of tasks that ty is allowed
/// to process in parallel.
///
@ -76,11 +70,11 @@ pub trait RustDoc {
mod tests {
use std::sync::{Arc, Mutex};
use crate::Db;
use crate::files::Files;
use crate::system::TestSystem;
use crate::system::{DbWithTestSystem, System};
use crate::vendored::VendoredFileSystem;
use crate::{Db, Upcast};
type Events = Arc<Mutex<Vec<salsa::Event>>>;
@ -153,15 +147,6 @@ mod tests {
}
}
impl Upcast<dyn Db> for TestDb {
fn upcast(&self) -> &(dyn Db + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn Db + 'static) {
self
}
}
impl DbWithTestSystem for TestDb {
fn test_system(&self) -> &TestSystem {
&self.system

View file

@ -212,7 +212,7 @@ impl Display for Error {
path: Some(path),
err,
} => {
write!(f, "IO error for operation on {}: {}", path, err)
write!(f, "IO error for operation on {path}: {err}")
}
ErrorKind::Io { path: None, err } => err.fmt(f),
ErrorKind::NonUtf8Path { path } => {

View file

@ -2,10 +2,10 @@ use anyhow::{Context, Result};
use std::sync::Arc;
use zip::CompressionMethod;
use ruff_db::Db as SourceDb;
use ruff_db::files::{File, Files};
use ruff_db::system::{OsSystem, System, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredFileSystemBuilder};
use ruff_db::{Db as SourceDb, Upcast};
use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
@ -66,15 +66,6 @@ impl ModuleDb {
}
}
impl Upcast<dyn SourceDb> for ModuleDb {
fn upcast(&self) -> &(dyn SourceDb + 'static) {
self
}
fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
self
}
}
#[salsa::db]
impl SourceDb for ModuleDb {
fn vendored(&self) -> &VendoredFileSystem {

View file

@ -191,6 +191,6 @@ where
#[expect(unsafe_code)]
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
let old_vec: &mut IndexVec<I, T> = unsafe { &mut *old_pointer };
unsafe { salsa::Update::maybe_update(&mut old_vec.raw, new_value.raw) }
unsafe { salsa::Update::maybe_update(&raw mut old_vec.raw, new_value.raw) }
}
}

View file

@ -38,6 +38,7 @@ colored = { workspace = true }
fern = { workspace = true }
glob = { workspace = true }
globset = { workspace = true }
hashbrown = { workspace = true }
imperative = { workspace = true }
is-macro = { workspace = true }
is-wsl = { workspace = true }

View file

@ -0,0 +1,5 @@
foo or{x: None for x in bar}
# C420 fix must make sure to insert a leading space if needed,
# See https://github.com/astral-sh/ruff/issues/18599

View file

@ -0,0 +1,2 @@
#!/usr/bin/env -S uv tool run ruff check --isolated --select EXE003
print("hello world")

View file

@ -0,0 +1,2 @@
#!/usr/bin/env -S uvx ruff check --isolated --select EXE003
print("hello world")

View file

@ -119,4 +119,26 @@ field35: "int | str | int" # Error
# Technically, this falls into the domain of the rule but it is an unlikely edge case,
# only works if you have from `__future__ import annotations` at the top of the file,
# and stringified annotations are discouraged in stub files.
field36: "int | str" | int # Ok
field36: "int | str" | int # Ok
# https://github.com/astral-sh/ruff/issues/18546
# Expand Optional[T] to Union[T, None]
# OK
field37: typing.Optional[int]
field38: typing.Union[int, None]
# equivalent to None
field39: typing.Optional[None]
# equivalent to int | None
field40: typing.Union[typing.Optional[int], None]
field41: typing.Optional[typing.Union[int, None]]
field42: typing.Union[typing.Optional[int], typing.Optional[int]]
field43: typing.Optional[int] | None
field44: typing.Optional[int | None]
field45: typing.Optional[int] | typing.Optional[int]
# equivalent to int | dict | None
field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
field47: typing.Optional[int] | typing.Optional[dict]
# avoid reporting twice
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
field49: typing.Optional[complex | complex] | complex

View file

@ -111,3 +111,25 @@ field33: typing.Union[typing.Union[int | int] | typing.Union[int | int]] # Error
# Test case for mixed union type
field34: typing.Union[list[int], str] | typing.Union[bytes, list[int]] # Error
# https://github.com/astral-sh/ruff/issues/18546
# Expand Optional[T] to Union[T, None]
# OK
field37: typing.Optional[int]
field38: typing.Union[int, None]
# equivalent to None
field39: typing.Optional[None]
# equivalent to int | None
field40: typing.Union[typing.Optional[int], None]
field41: typing.Optional[typing.Union[int, None]]
field42: typing.Union[typing.Optional[int], typing.Optional[int]]
field43: typing.Optional[int] | None
field44: typing.Optional[int | None]
field45: typing.Optional[int] | typing.Optional[int]
# equivalent to int | dict | None
field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
field47: typing.Optional[int] | typing.Optional[dict]
# avoid reporting twice
field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
field49: typing.Optional[complex | complex] | complex

View file

@ -170,3 +170,25 @@ def foo():
v = {}
for o,(x,)in():
v[x,]=o
# https://github.com/astral-sh/ruff/issues/19005
def issue_19005_1():
c = {}
a = object()
for a.b in ():
c[a.b] = a.b
def issue_19005_2():
a = object()
c = {}
for a.k, a.v in ():
c[a.k] = a.v
def issue_19005_3():
a = [None, None]
c = {}
for a[0], a[1] in ():
c[a[0]] = a[1]

View file

@ -69,3 +69,11 @@ def func():
Returns:
the value
"""
def func():
("""Docstring.
Raises:
ValueError: An error.
""")

View file

@ -90,3 +90,52 @@ class AClass:
def myfunc(param: "tuple[Union[int, 'AClass', None], str]"):
print(param)
from typing import NamedTuple, Union
import typing_extensions
from typing_extensions import (
NamedTuple as NamedTupleTE,
Union as UnionTE,
)
# Regression test for https://github.com/astral-sh/ruff/issues/18619
# Don't emit lint for `NamedTuple`
a_plain_1: Union[NamedTuple, int] = None
a_plain_2: Union[int, NamedTuple] = None
a_plain_3: Union[NamedTuple, None] = None
a_plain_4: Union[None, NamedTuple] = None
a_plain_te_1: UnionTE[NamedTupleTE, int] = None
a_plain_te_2: UnionTE[int, NamedTupleTE] = None
a_plain_te_3: UnionTE[NamedTupleTE, None] = None
a_plain_te_4: UnionTE[None, NamedTupleTE] = None
a_plain_typing_1: UnionTE[typing.NamedTuple, int] = None
a_plain_typing_2: UnionTE[int, typing.NamedTuple] = None
a_plain_typing_3: UnionTE[typing.NamedTuple, None] = None
a_plain_typing_4: UnionTE[None, typing.NamedTuple] = None
a_string_1: "Union[NamedTuple, int]" = None
a_string_2: "Union[int, NamedTuple]" = None
a_string_3: "Union[NamedTuple, None]" = None
a_string_4: "Union[None, NamedTuple]" = None
a_string_te_1: "UnionTE[NamedTupleTE, int]" = None
a_string_te_2: "UnionTE[int, NamedTupleTE]" = None
a_string_te_3: "UnionTE[NamedTupleTE, None]" = None
a_string_te_4: "UnionTE[None, NamedTupleTE]" = None
a_string_typing_1: "typing.Union[typing.NamedTuple, int]" = None
a_string_typing_2: "typing.Union[int, typing.NamedTuple]" = None
a_string_typing_3: "typing.Union[typing.NamedTuple, None]" = None
a_string_typing_4: "typing.Union[None, typing.NamedTuple]" = None
b_plain_1: Union[NamedTuple] = None
b_plain_2: Union[NamedTuple, None] = None
b_plain_te_1: UnionTE[NamedTupleTE] = None
b_plain_te_2: UnionTE[NamedTupleTE, None] = None
b_plain_typing_1: UnionTE[typing.NamedTuple] = None
b_plain_typing_2: UnionTE[typing.NamedTuple, None] = None
b_string_1: "Union[NamedTuple]" = None
b_string_2: "Union[NamedTuple, None]" = None
b_string_te_1: "UnionTE[NamedTupleTE]" = None
b_string_te_2: "UnionTE[NamedTupleTE, None]" = None
b_string_typing_1: "typing.Union[typing.NamedTuple]" = None
b_string_typing_2: "typing.Union[typing.NamedTuple, None]" = None

View file

@ -105,3 +105,23 @@ import builtins
class C:
def f(self):
builtins.super(C, self)
# see: https://github.com/astral-sh/ruff/issues/18533
class ClassForCommentEnthusiasts(BaseClass):
def with_comments(self):
super(
# super helpful comment
ClassForCommentEnthusiasts,
self
).f()
super(
ClassForCommentEnthusiasts,
# even more helpful comment
self
).f()
super(
ClassForCommentEnthusiasts,
self
# also a comment
).f()

View file

@ -26,3 +26,9 @@ def hello():
f"foo"u"bar" # OK
f"foo" u"bar" # OK
# https://github.com/astral-sh/ruff/issues/18895
""u""
""u"hi"
""""""""""""""""""""u"hi"
""U"helloooo"

View file

@ -47,3 +47,25 @@ class ServiceRefOrValue:
# Test for: https://github.com/astral-sh/ruff/issues/18508
# Optional[None] should not be offered a fix
foo: Optional[None] = None
from typing import NamedTuple, Optional
import typing_extensions
from typing_extensions import (
NamedTuple as NamedTupleTE,
Optional as OptionalTE,
)
# Regression test for https://github.com/astral-sh/ruff/issues/18619
# Don't emit lint for `NamedTuple`
a1: Optional[NamedTuple] = None
a2: typing.Optional[NamedTuple] = None
a3: OptionalTE[NamedTuple] = None
a4: typing_extensions.Optional[NamedTuple] = None
a5: Optional[typing.NamedTuple] = None
a6: typing.Optional[typing.NamedTuple] = None
a7: OptionalTE[typing.NamedTuple] = None
a8: typing_extensions.Optional[typing.NamedTuple] = None
a9: "Optional[NamedTuple]" = None
a10: Optional[NamedTupleTE] = None

View file

@ -85,3 +85,10 @@ def _():
if isinstance(foo, type(None)):
...
# https://github.com/astral-sh/ruff/issues/19047
if isinstance(foo, ()):
pass
if isinstance(foo, Union[()]):
pass

View file

@ -7,6 +7,7 @@ use ruff_python_semantic::analyze::typing;
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::preview::is_optional_as_none_in_union_enabled;
use crate::registry::Rule;
use crate::rules::{
airflow, flake8_2020, flake8_async, flake8_bandit, flake8_boolean_trap, flake8_bugbear,
@ -90,7 +91,13 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if checker.is_rule_enabled(Rule::UnnecessaryLiteralUnion) {
flake8_pyi::rules::unnecessary_literal_union(checker, expr);
}
if checker.is_rule_enabled(Rule::DuplicateUnionMember) {
if checker.is_rule_enabled(Rule::DuplicateUnionMember)
// Avoid duplicate checks inside `Optional`
&& !(
is_optional_as_none_in_union_enabled(checker.settings())
&& checker.semantic.inside_optional()
)
{
flake8_pyi::rules::duplicate_union_member(checker, expr);
}
if checker.is_rule_enabled(Rule::RedundantLiteralUnion) {
@ -1430,6 +1437,11 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
if !checker.semantic.in_nested_union() {
if checker.is_rule_enabled(Rule::DuplicateUnionMember)
&& checker.semantic.in_type_definition()
// Avoid duplicate checks inside `Optional`
&& !(
is_optional_as_none_in_union_enabled(checker.settings())
&& checker.semantic.inside_optional()
)
{
flake8_pyi::rules::duplicate_union_member(checker, expr);
}

View file

@ -2765,9 +2765,7 @@ impl<'a> Checker<'a> {
self.semantic.restore(snapshot);
if self.semantic.in_annotation()
&& self.semantic.in_typing_only_annotation()
{
if self.semantic.in_typing_only_annotation() {
if self.is_rule_enabled(Rule::QuotedAnnotation) {
pyupgrade::rules::quoted_annotation(self, annotation, range);
}

View file

@ -35,39 +35,34 @@ pub(crate) fn check_noqa(
// Identify any codes that are globally exempted (within the current file).
let file_noqa_directives =
FileNoqaDirectives::extract(locator, comment_ranges, &settings.external, path);
let exemption = FileExemption::from(&file_noqa_directives);
// Extract all `noqa` directives.
let mut noqa_directives =
NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);
if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
return Vec::new();
}
let exemption = FileExemption::from(&file_noqa_directives);
// Indices of diagnostics that were ignored by a `noqa` directive.
let mut ignored_diagnostics = vec![];
// Remove any ignored diagnostics.
'outer: for (index, diagnostic) in context.iter().enumerate() {
// Can't ignore syntax errors.
let Some(code) = diagnostic.noqa_code() else {
let Some(code) = diagnostic.secondary_code() else {
continue;
};
if code == Rule::BlanketNOQA.noqa_code() {
if *code == Rule::BlanketNOQA.noqa_code() {
continue;
}
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, ignore all diagnostics.
ignored_diagnostics.push(index);
continue;
}
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, ignore it.
if codes.contains(&&code) {
ignored_diagnostics.push(index);
continue;
}
}
if exemption.contains_secondary_code(code) {
ignored_diagnostics.push(index);
continue;
}
let noqa_offsets = diagnostic
@ -82,13 +77,21 @@ pub(crate) fn check_noqa(
{
let suppressed = match &directive_line.directive {
Directive::All(_) => {
directive_line.matches.push(code);
let Ok(rule) = Rule::from_code(code) else {
debug_assert!(false, "Invalid secondary code `{code}`");
continue;
};
directive_line.matches.push(rule);
ignored_diagnostics.push(index);
true
}
Directive::Codes(directive) => {
if directive.includes(code) {
directive_line.matches.push(code);
let Ok(rule) = Rule::from_code(code) else {
debug_assert!(false, "Invalid secondary code `{code}`");
continue;
};
directive_line.matches.push(rule);
ignored_diagnostics.push(index);
true
} else {
@ -147,11 +150,11 @@ pub(crate) fn check_noqa(
if seen_codes.insert(original_code) {
let is_code_used = if is_file_level {
context
.iter()
.any(|diag| diag.noqa_code().is_some_and(|noqa| noqa == code))
context.iter().any(|diag| {
diag.secondary_code().is_some_and(|noqa| *noqa == code)
})
} else {
matches.iter().any(|match_| *match_ == code)
matches.iter().any(|match_| match_.noqa_code() == code)
} || settings
.external
.iter()

View file

@ -46,6 +46,12 @@ impl PartialEq<&str> for NoqaCode {
}
}
/// Symmetric counterpart of `impl PartialEq<&str> for NoqaCode`, so code
/// comparisons work with the operands in either order.
impl PartialEq<NoqaCode> for &str {
fn eq(&self, other: &NoqaCode) -> bool {
// Delegate to the `NoqaCode`-side implementation to keep the two in sync.
other.eq(self)
}
}
impl serde::Serialize for NoqaCode {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where

View file

@ -63,7 +63,7 @@ fn apply_fixes<'a>(
let mut source_map = SourceMap::default();
for (code, name, fix) in diagnostics
.filter_map(|msg| msg.noqa_code().map(|code| (code, msg.name(), msg)))
.filter_map(|msg| msg.secondary_code().map(|code| (code, msg.name(), msg)))
.filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix)))
.sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2))
{

View file

@ -1,12 +1,11 @@
use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::path::Path;
use anyhow::{Result, anyhow};
use colored::Colorize;
use itertools::Itertools;
use ruff_python_parser::semantic_errors::SemanticSyntaxError;
use rustc_hash::FxHashMap;
use rustc_hash::FxBuildHasher;
use ruff_notebook::Notebook;
use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
@ -23,10 +22,10 @@ use crate::checkers::imports::check_imports;
use crate::checkers::noqa::check_noqa;
use crate::checkers::physical_lines::check_physical_lines;
use crate::checkers::tokens::check_tokens;
use crate::codes::NoqaCode;
use crate::directives::Directives;
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::fix::{FixResult, fix_file};
use crate::message::SecondaryCode;
use crate::noqa::add_noqa;
use crate::package::PackageRoot;
use crate::preview::is_py314_support_enabled;
@ -95,25 +94,25 @@ struct FixCount {
/// A mapping from a noqa code to the corresponding lint name and a count of applied fixes.
#[derive(Debug, Default, PartialEq)]
pub struct FixTable(FxHashMap<NoqaCode, FixCount>);
pub struct FixTable(hashbrown::HashMap<SecondaryCode, FixCount, rustc_hash::FxBuildHasher>);
impl FixTable {
pub fn counts(&self) -> impl Iterator<Item = usize> {
self.0.values().map(|fc| fc.count)
}
pub fn entry(&mut self, code: NoqaCode) -> FixTableEntry {
FixTableEntry(self.0.entry(code))
pub fn entry<'a>(&'a mut self, code: &'a SecondaryCode) -> FixTableEntry<'a> {
FixTableEntry(self.0.entry_ref(code))
}
pub fn iter(&self) -> impl Iterator<Item = (NoqaCode, &'static str, usize)> {
pub fn iter(&self) -> impl Iterator<Item = (&SecondaryCode, &'static str, usize)> {
self.0
.iter()
.map(|(code, FixCount { rule_name, count })| (*code, *rule_name, *count))
.map(|(code, FixCount { rule_name, count })| (code, *rule_name, *count))
}
pub fn keys(&self) -> impl Iterator<Item = NoqaCode> {
self.0.keys().copied()
pub fn keys(&self) -> impl Iterator<Item = &SecondaryCode> {
self.0.keys()
}
pub fn is_empty(&self) -> bool {
@ -121,7 +120,9 @@ impl FixTable {
}
}
pub struct FixTableEntry<'a>(Entry<'a, NoqaCode, FixCount>);
pub struct FixTableEntry<'a>(
hashbrown::hash_map::EntryRef<'a, 'a, SecondaryCode, SecondaryCode, FixCount, FxBuildHasher>,
);
impl<'a> FixTableEntry<'a> {
pub fn or_default(self, rule_name: &'static str) -> &'a mut usize {
@ -678,18 +679,16 @@ pub fn lint_fix<'a>(
}
}
fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String {
rules
.into_iter()
.map(|rule| rule.to_string())
.sorted_unstable()
.dedup()
.join(", ")
/// Renders an iterable of rule codes as a sorted, de-duplicated,
/// comma-separated string (used in the failure-report messages below).
fn collect_rule_codes<T>(rules: impl IntoIterator<Item = T>) -> String
where
    T: Ord + PartialEq + std::fmt::Display,
{
    // Materialize so we can sort in place; sorting first makes `dedup`
    // (which only removes *adjacent* duplicates) remove all duplicates.
    let mut codes: Vec<T> = rules.into_iter().collect();
    codes.sort_unstable();
    codes.dedup();
    let rendered: Vec<String> = codes.iter().map(ToString::to_string).collect();
    rendered.join(", ")
}
#[expect(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, diagnostics: &[OldDiagnostic]) {
let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::noqa_code));
let codes = collect_rule_codes(diagnostics.iter().filter_map(OldDiagnostic::secondary_code));
if cfg!(debug_assertions) {
eprintln!(
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
@ -721,11 +720,11 @@ This indicates a bug in Ruff. If you could open an issue at:
}
#[expect(clippy::print_stderr)]
fn report_fix_syntax_error(
fn report_fix_syntax_error<'a>(
path: &Path,
transformed: &str,
error: &ParseError,
rules: impl IntoIterator<Item = NoqaCode>,
rules: impl IntoIterator<Item = &'a SecondaryCode>,
) {
let codes = collect_rule_codes(rules);
if cfg!(debug_assertions) {

View file

@ -33,7 +33,7 @@ impl Emitter for AzureEmitter {
line = location.line,
col = location.column,
code = diagnostic
.noqa_code()
.secondary_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = diagnostic.body(),
)?;

View file

@ -33,7 +33,7 @@ impl Emitter for GithubEmitter {
writer,
"::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
code = diagnostic
.noqa_code()
.secondary_code()
.map_or_else(String::new, |code| format!(" ({code})")),
file = diagnostic.filename(),
row = source_location.line,
@ -50,7 +50,7 @@ impl Emitter for GithubEmitter {
column = location.column,
)?;
if let Some(code) = diagnostic.noqa_code() {
if let Some(code) = diagnostic.secondary_code() {
write!(writer, " {code}")?;
}

View file

@ -90,18 +90,15 @@ impl Serialize for SerializedMessages<'_> {
}
fingerprints.insert(message_fingerprint);
let (description, check_name) = if let Some(code) = diagnostic.noqa_code() {
(diagnostic.body().to_string(), code.to_string())
let (description, check_name) = if let Some(code) = diagnostic.secondary_code() {
(diagnostic.body().to_string(), code.as_str())
} else {
let description = diagnostic.body();
let description_without_prefix = description
.strip_prefix("SyntaxError: ")
.unwrap_or(description);
(
description_without_prefix.to_string(),
"syntax-error".to_string(),
)
(description_without_prefix.to_string(), "syntax-error")
};
let value = json!({

View file

@ -87,7 +87,7 @@ pub(crate) fn message_to_json_value(message: &OldDiagnostic, context: &EmitterCo
}
json!({
"code": message.noqa_code().map(|code| code.to_string()),
"code": message.secondary_code(),
"url": message.to_url(),
"message": message.body(),
"fix": fix,

View file

@ -59,7 +59,7 @@ impl Emitter for JunitEmitter {
body = message.body()
));
let mut case = TestCase::new(
if let Some(code) = message.noqa_code() {
if let Some(code) = message.secondary_code() {
format!("org.ruff.{code}")
} else {
"org.ruff".to_string()

View file

@ -62,7 +62,7 @@ pub struct OldDiagnostic {
pub fix: Option<Fix>,
pub parent: Option<TextSize>,
pub(crate) noqa_offset: Option<TextSize>,
pub(crate) noqa_code: Option<NoqaCode>,
pub(crate) secondary_code: Option<SecondaryCode>,
}
impl OldDiagnostic {
@ -79,7 +79,7 @@ impl OldDiagnostic {
fix: None,
parent: None,
noqa_offset: None,
noqa_code: None,
secondary_code: None,
}
}
@ -115,7 +115,7 @@ impl OldDiagnostic {
fix,
parent,
noqa_offset,
noqa_code: Some(rule.noqa_code()),
secondary_code: Some(SecondaryCode(rule.noqa_code().to_string())),
}
}
@ -247,9 +247,9 @@ impl OldDiagnostic {
self.fix().is_some()
}
/// Returns the [`NoqaCode`] corresponding to the diagnostic message.
pub fn noqa_code(&self) -> Option<NoqaCode> {
self.noqa_code
/// Returns the noqa code for the diagnostic message as a string.
pub fn secondary_code(&self) -> Option<&SecondaryCode> {
self.secondary_code.as_ref()
}
/// Returns the URL for the rule documentation, if it exists.
@ -384,6 +384,68 @@ impl<'a> EmitterContext<'a> {
}
}
/// A secondary identifier for a lint diagnostic.
///
/// For Ruff rules this means the noqa code.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash, serde::Serialize)]
#[serde(transparent)]
pub struct SecondaryCode(String);
impl SecondaryCode {
/// Wraps an already-formatted code string (e.g. `"E501"`).
pub fn new(code: String) -> Self {
Self(code)
}
/// Borrows the code as a plain `&str`.
pub fn as_str(&self) -> &str {
&self.0
}
}
impl Display for SecondaryCode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&self.0)
}
}
// Deref to `str` lets callers use string methods (and `&SecondaryCode`
// where `&str` is expected) without an explicit `as_str()` call.
impl std::ops::Deref for SecondaryCode {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl PartialEq<&str> for SecondaryCode {
fn eq(&self, other: &&str) -> bool {
self.0 == *other
}
}
// Symmetric form so `&str == SecondaryCode` also works; delegates to the
// impl above to keep the two directions consistent.
impl PartialEq<SecondaryCode> for &str {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}
// Cross-type equality with `NoqaCode`, via `NoqaCode`'s `PartialEq<&str>`.
impl PartialEq<NoqaCode> for SecondaryCode {
fn eq(&self, other: &NoqaCode) -> bool {
&self.as_str() == other
}
}
// Symmetric form of the cross-type equality; delegates to the impl above.
impl PartialEq<SecondaryCode> for NoqaCode {
fn eq(&self, other: &SecondaryCode) -> bool {
other.eq(self)
}
}
// for `hashbrown::EntryRef`
impl From<&SecondaryCode> for SecondaryCode {
fn from(value: &SecondaryCode) -> Self {
value.clone()
}
}
#[cfg(test)]
mod tests {
use rustc_hash::FxHashMap;

View file

@ -26,7 +26,7 @@ impl Emitter for PylintEmitter {
diagnostic.compute_start_location().line
};
let body = if let Some(code) = diagnostic.noqa_code() {
let body = if let Some(code) = diagnostic.secondary_code() {
format!("[{code}] {body}", body = diagnostic.body())
} else {
diagnostic.body().to_string()

View file

@ -71,7 +71,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.noqa_code().map(|code| code.to_string()),
"value": message.secondary_code(),
"url": message.to_url(),
},
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
@ -84,7 +84,7 @@ fn message_to_rdjson_value(message: &OldDiagnostic) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.noqa_code().map(|code| code.to_string()),
"value": message.secondary_code(),
"url": message.to_url(),
},
})

View file

@ -8,9 +8,8 @@ use serde_json::json;
use ruff_source_file::OneIndexed;
use crate::VERSION;
use crate::codes::NoqaCode;
use crate::fs::normalize_path;
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
use crate::registry::{Linter, RuleNamespace};
pub struct SarifEmitter;
@ -29,7 +28,7 @@ impl Emitter for SarifEmitter {
let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect();
let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect();
rules.sort_by(|a, b| a.code.cmp(&b.code));
rules.sort_by(|a, b| a.code.cmp(b.code));
let output = json!({
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
@ -54,26 +53,25 @@ impl Emitter for SarifEmitter {
#[derive(Debug, Clone)]
struct SarifRule<'a> {
name: &'a str,
code: String,
code: &'a SecondaryCode,
linter: &'a str,
summary: &'a str,
explanation: Option<&'a str>,
url: Option<String>,
}
impl From<NoqaCode> for SarifRule<'_> {
fn from(code: NoqaCode) -> Self {
let code_str = code.to_string();
impl<'a> From<&'a SecondaryCode> for SarifRule<'a> {
fn from(code: &'a SecondaryCode) -> Self {
// This is a manual re-implementation of Rule::from_code, but we also want the Linter. This
// avoids calling Linter::parse_code twice.
let (linter, suffix) = Linter::parse_code(&code_str).unwrap();
let (linter, suffix) = Linter::parse_code(code).unwrap();
let rule = linter
.all_rules()
.find(|rule| rule.noqa_code().suffix() == suffix)
.expect("Expected a valid noqa code corresponding to a rule");
Self {
name: rule.into(),
code: code_str,
code,
linter: linter.name(),
summary: rule.message_formats()[0],
explanation: rule.explanation(),
@ -111,8 +109,8 @@ impl Serialize for SarifRule<'_> {
}
#[derive(Debug)]
struct SarifResult {
code: Option<NoqaCode>,
struct SarifResult<'a> {
code: Option<&'a SecondaryCode>,
level: String,
message: String,
uri: String,
@ -122,14 +120,14 @@ struct SarifResult {
end_column: OneIndexed,
}
impl SarifResult {
impl<'a> SarifResult<'a> {
#[cfg(not(target_arch = "wasm32"))]
fn from_message(message: &OldDiagnostic) -> Result<Self> {
fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
let start_location = message.compute_start_location();
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
code: message.noqa_code(),
code: message.secondary_code(),
level: "error".to_string(),
message: message.body().to_string(),
uri: url::Url::from_file_path(&path)
@ -144,12 +142,12 @@ impl SarifResult {
#[cfg(target_arch = "wasm32")]
#[expect(clippy::unnecessary_wraps)]
fn from_message(message: &OldDiagnostic) -> Result<Self> {
fn from_message(message: &'a OldDiagnostic) -> Result<Self> {
let start_location = message.compute_start_location();
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
code: message.noqa_code(),
code: message.secondary_code(),
level: "error".to_string(),
message: message.body().to_string(),
uri: path.display().to_string(),
@ -161,7 +159,7 @@ impl SarifResult {
}
}
impl Serialize for SarifResult {
impl Serialize for SarifResult<'_> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
@ -184,7 +182,7 @@ impl Serialize for SarifResult {
}
}
}],
"ruleId": self.code.map(|code| code.to_string()),
"ruleId": self.code,
})
.serialize(serializer)
}

View file

@ -14,7 +14,7 @@ use crate::Locator;
use crate::fs::relativize_path;
use crate::line_width::{IndentWidth, LineWidthBuilder};
use crate::message::diff::Diff;
use crate::message::{Emitter, EmitterContext, OldDiagnostic};
use crate::message::{Emitter, EmitterContext, OldDiagnostic, SecondaryCode};
use crate::settings::types::UnsafeFixes;
bitflags! {
@ -151,8 +151,8 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(code) = self.message.noqa_code() {
write!(f, "{} ", code.to_string().red().bold())?;
if let Some(code) = self.message.secondary_code() {
write!(f, "{} ", code.red().bold())?;
}
return write!(
f,
@ -164,11 +164,11 @@ impl Display for RuleCodeAndBody<'_> {
}
}
if let Some(code) = self.message.noqa_code() {
if let Some(code) = self.message.secondary_code() {
write!(
f,
"{code} {body}",
code = code.to_string().red().bold(),
code = code.red().bold(),
body = self.message.body(),
)
} else {
@ -254,8 +254,9 @@ impl Display for MessageCodeFrame<'_> {
let label = self
.message
.noqa_code()
.map_or_else(String::new, |code| code.to_string());
.secondary_code()
.map(SecondaryCode::as_str)
.unwrap_or_default();
let line_start = self.notebook_index.map_or_else(
|| start_index.get(),
@ -269,7 +270,7 @@ impl Display for MessageCodeFrame<'_> {
let span = usize::from(source.annotation_range.start())
..usize::from(source.annotation_range.end());
let annotation = Level::Error.span(span).label(&label);
let annotation = Level::Error.span(span).label(label);
let snippet = Snippet::source(&source.text)
.line_start(line_start)
.annotation(annotation)

View file

@ -16,9 +16,8 @@ use rustc_hash::FxHashSet;
use crate::Edit;
use crate::Locator;
use crate::codes::NoqaCode;
use crate::fs::relativize_path;
use crate::message::OldDiagnostic;
use crate::message::{OldDiagnostic, SecondaryCode};
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
@ -106,9 +105,9 @@ impl Codes<'_> {
/// Returns `true` if the string list of `codes` includes `code` (or an alias
/// thereof).
pub(crate) fn includes(&self, needle: NoqaCode) -> bool {
pub(crate) fn includes<T: for<'a> PartialEq<&'a str>>(&self, needle: &T) -> bool {
self.iter()
.any(|code| needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
.any(|code| *needle == get_redirect_target(code.as_str()).unwrap_or(code.as_str()))
}
}
@ -140,48 +139,55 @@ pub(crate) fn rule_is_ignored(
Ok(Some(NoqaLexerOutput {
directive: Directive::Codes(codes),
..
})) => codes.includes(code.noqa_code()),
})) => codes.includes(&code.noqa_code()),
_ => false,
}
}
/// A summary of the file-level exemption as extracted from [`FileNoqaDirectives`].
#[derive(Debug)]
pub(crate) enum FileExemption<'a> {
pub(crate) enum FileExemption {
/// The file is exempt from all rules.
All(Vec<&'a NoqaCode>),
All(Vec<Rule>),
/// The file is exempt from the given rules.
Codes(Vec<&'a NoqaCode>),
Codes(Vec<Rule>),
}
impl FileExemption<'_> {
/// Returns `true` if the file is exempt from the given rule.
pub(crate) fn includes(&self, needle: Rule) -> bool {
let needle = needle.noqa_code();
impl FileExemption {
/// Returns `true` if the file is exempt from the given rule, as identified by its noqa code.
pub(crate) fn contains_secondary_code(&self, needle: &SecondaryCode) -> bool {
match self {
FileExemption::All(_) => true,
FileExemption::Codes(codes) => codes.iter().any(|code| needle == **code),
FileExemption::Codes(codes) => codes.iter().any(|code| *needle == code.noqa_code()),
}
}
/// Returns `true` if the file is exempt from the given rule.
pub(crate) fn includes(&self, needle: Rule) -> bool {
match self {
FileExemption::All(_) => true,
FileExemption::Codes(codes) => codes.contains(&needle),
}
}
/// Returns `true` if the file exemption lists the rule directly, rather than via a blanket
/// exemption.
pub(crate) fn enumerates(&self, needle: Rule) -> bool {
let needle = needle.noqa_code();
let codes = match self {
FileExemption::All(codes) => codes,
FileExemption::Codes(codes) => codes,
};
codes.iter().any(|code| needle == **code)
codes.contains(&needle)
}
}
impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption<'a> {
impl<'a> From<&'a FileNoqaDirectives<'a>> for FileExemption {
fn from(directives: &'a FileNoqaDirectives) -> Self {
let codes = directives
.lines()
.iter()
.flat_map(|line| &line.matches)
.copied()
.collect();
if directives
.lines()
@ -203,7 +209,7 @@ pub(crate) struct FileNoqaDirectiveLine<'a> {
/// The blanket noqa directive.
pub(crate) parsed_file_exemption: Directive<'a>,
/// The codes that are ignored by the parsed exemptions.
pub(crate) matches: Vec<NoqaCode>,
pub(crate) matches: Vec<Rule>,
}
impl Ranged for FileNoqaDirectiveLine<'_> {
@ -270,7 +276,7 @@ impl<'a> FileNoqaDirectives<'a> {
if let Ok(rule) = Rule::from_code(get_redirect_target(code).unwrap_or(code))
{
Some(rule.noqa_code())
Some(rule)
} else {
#[expect(deprecated)]
let line = locator.compute_line_index(range.start());
@ -303,6 +309,10 @@ impl<'a> FileNoqaDirectives<'a> {
pub(crate) fn lines(&self) -> &[FileNoqaDirectiveLine] {
&self.0
}
pub(crate) fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
/// Output of lexing a `noqa` directive.
@ -830,7 +840,7 @@ fn build_noqa_edits_by_line<'a>(
struct NoqaComment<'a> {
line: TextSize,
code: NoqaCode,
code: &'a SecondaryCode,
directive: Option<&'a Directive<'a>>,
}
@ -846,24 +856,14 @@ fn find_noqa_comments<'a>(
// Mark any non-ignored diagnostics.
for message in diagnostics {
let Some(code) = message.noqa_code() else {
let Some(code) = message.secondary_code() else {
comments_by_line.push(None);
continue;
};
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives.
comments_by_line.push(None);
continue;
}
FileExemption::Codes(codes) => {
// If the diagnostic is ignored by a global exemption, don't add a noqa directive.
if codes.contains(&&code) {
comments_by_line.push(None);
continue;
}
}
if exemption.contains_secondary_code(code) {
comments_by_line.push(None);
continue;
}
// Is the violation ignored by a `noqa` directive on the parent line?
@ -921,7 +921,7 @@ fn find_noqa_comments<'a>(
struct NoqaEdit<'a> {
edit_range: TextRange,
noqa_codes: FxHashSet<NoqaCode>,
noqa_codes: FxHashSet<&'a SecondaryCode>,
codes: Option<&'a Codes<'a>>,
line_ending: LineEnding,
}
@ -942,13 +942,13 @@ impl NoqaEdit<'_> {
writer,
self.noqa_codes
.iter()
.map(ToString::to_string)
.chain(codes.iter().map(ToString::to_string))
.map(|code| code.as_str())
.chain(codes.iter().map(Code::as_str))
.sorted_unstable(),
);
}
None => {
push_codes(writer, self.noqa_codes.iter().map(ToString::to_string));
push_codes(writer, self.noqa_codes.iter().sorted_unstable());
}
}
write!(writer, "{}", self.line_ending.as_str()).unwrap();
@ -964,7 +964,7 @@ impl Ranged for NoqaEdit<'_> {
fn generate_noqa_edit<'a>(
directive: Option<&'a Directive>,
offset: TextSize,
noqa_codes: FxHashSet<NoqaCode>,
noqa_codes: FxHashSet<&'a SecondaryCode>,
locator: &Locator,
line_ending: LineEnding,
) -> Option<NoqaEdit<'a>> {
@ -1017,7 +1017,7 @@ pub(crate) struct NoqaDirectiveLine<'a> {
/// The noqa directive.
pub(crate) directive: Directive<'a>,
/// The codes that are ignored by the directive.
pub(crate) matches: Vec<NoqaCode>,
pub(crate) matches: Vec<Rule>,
/// Whether the directive applies to `range.end`.
pub(crate) includes_end: bool,
}
@ -1142,6 +1142,10 @@ impl<'a> NoqaDirectives<'a> {
pub(crate) fn lines(&self) -> &[NoqaDirectiveLine] {
&self.inner
}
pub(crate) fn is_empty(&self) -> bool {
self.inner.is_empty()
}
}
/// Remaps offsets falling into one of the ranges to instead check for a noqa comment on the

View file

@ -90,6 +90,11 @@ pub(crate) const fn is_ignore_init_files_in_useless_alias_enabled(
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/18572
// Preview gate for the `Optional`-as-`None`-in-union behavior; callers use it
// to skip duplicate-union-member checks inside `Optional[...]` annotations.
pub(crate) const fn is_optional_as_none_in_union_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/18547
pub(crate) const fn is_invalid_async_mock_access_check_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@ -99,3 +104,10 @@ pub(crate) const fn is_invalid_async_mock_access_check_enabled(settings: &Linter
pub(crate) const fn is_raise_exception_byte_string_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/18683
// Preview gate: when enabled, the fix for `super(...)` calls with explicit
// parameters is offered as safe rather than unsafe.
pub(crate) const fn is_safe_super_call_with_parameters_fix_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}

View file

@ -23,12 +23,16 @@ use crate::{FixAvailability, Violation};
///
/// ## Example
/// ```python
/// from airflow.auth.managers.fab.fab_auth_manage import FabAuthManager
/// from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
///
/// fab_auth_manager_app = FabAuthManager().get_fastapi_app()
/// ```
///
/// Use instead:
/// ```python
/// from airflow.providers.fab.auth_manager.fab_auth_manage import FabAuthManager
/// from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
///
/// fab_auth_manager_app = FabAuthManager().get_fastapi_app()
/// ```
#[derive(ViolationMetadata)]
pub(crate) struct Airflow3MovedToProvider<'a> {

View file

@ -24,11 +24,31 @@ use ruff_text_size::TextRange;
/// ## Example
/// ```python
/// from airflow.operators.python import PythonOperator
///
///
/// def print_context(ds=None, **kwargs):
/// print(kwargs)
/// print(ds)
///
///
/// print_the_context = PythonOperator(
/// task_id="print_the_context", python_callable=print_context
/// )
/// ```
///
/// Use instead:
/// ```python
/// from airflow.providers.standard.operators.python import PythonOperator
///
///
/// def print_context(ds=None, **kwargs):
/// print(kwargs)
/// print(ds)
///
///
/// print_the_context = PythonOperator(
/// task_id="print_the_context", python_callable=print_context
/// )
/// ```
#[derive(ViolationMetadata)]
pub(crate) struct Airflow3SuggestedToMoveToProvider<'a> {

View file

@ -20,12 +20,18 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// import urllib
///
///
/// async def fetch():
/// urllib.request.urlopen("https://example.com/foo/bar").read()
/// ```
///
/// Use instead:
/// ```python
/// import aiohttp
///
///
/// async def fetch():
/// async with aiohttp.ClientSession() as session:
/// async with session.get("https://example.com/foo/bar") as resp:

View file

@ -21,12 +21,18 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// import os
///
///
/// async def foo():
/// os.popen(cmd)
/// ```
///
/// Use instead:
/// ```python
/// import asyncio
///
///
/// async def foo():
/// asyncio.create_subprocess_shell(cmd)
/// ```
@ -54,12 +60,18 @@ impl Violation for CreateSubprocessInAsyncFunction {
///
/// ## Example
/// ```python
/// import subprocess
///
///
/// async def foo():
/// subprocess.run(cmd)
/// ```
///
/// Use instead:
/// ```python
/// import asyncio
///
///
/// async def foo():
/// asyncio.create_subprocess_shell(cmd)
/// ```
@ -87,12 +99,19 @@ impl Violation for RunProcessInAsyncFunction {
///
/// ## Example
/// ```python
/// import os
///
///
/// async def foo():
/// os.waitpid(0)
/// ```
///
/// Use instead:
/// ```python
/// import asyncio
/// import os
///
///
/// def wait_for_process():
/// os.waitpid(0)
///

View file

@ -19,12 +19,18 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// import time
///
///
/// async def fetch():
/// time.sleep(1)
/// ```
///
/// Use instead:
/// ```python
/// import asyncio
///
///
/// async def fetch():
/// await asyncio.sleep(1)
/// ```

View file

@ -21,6 +21,9 @@ use crate::rules::flake8_async::helpers::MethodName;
///
/// ## Example
/// ```python
/// import asyncio
///
///
/// async def func():
/// async with asyncio.timeout(2):
/// do_something()
@ -28,6 +31,9 @@ use crate::rules::flake8_async::helpers::MethodName;
///
/// Use instead:
/// ```python
/// import asyncio
///
///
/// async def func():
/// async with asyncio.timeout(2):
/// do_something()

View file

@ -18,12 +18,18 @@ use crate::{Edit, Fix, FixAvailability, Violation};
///
/// ## Example
/// ```python
/// import trio
///
///
/// async def double_sleep(x):
/// trio.sleep(2 * x)
/// ```
///
/// Use instead:
/// ```python
/// import trio
///
///
/// async def double_sleep(x):
/// await trio.sleep(2 * x)
/// ```

View file

@ -18,7 +18,7 @@ use crate::checkers::ast::Checker;
///
/// ## Example
/// ```python
/// import flask
/// from flask import Flask
///
/// app = Flask()
///
@ -27,7 +27,9 @@ use crate::checkers::ast::Checker;
///
/// Use instead:
/// ```python
/// import flask
/// import os
///
/// from flask import Flask
///
/// app = Flask()
///

View file

@ -108,10 +108,10 @@ impl Violation for SubprocessWithoutShellEqualsTrue {
///
/// ## Example
/// ```python
/// import subprocess
/// import my_custom_subprocess
///
/// user_input = input("Enter a command: ")
/// subprocess.run(user_input, shell=True)
/// my_custom_subprocess.run(user_input, shell=True)
/// ```
///
/// ## References
@ -265,14 +265,14 @@ impl Violation for StartProcessWithPartialPath {
/// ```python
/// import subprocess
///
/// subprocess.Popen(["chmod", "777", "*.py"])
/// subprocess.Popen(["chmod", "777", "*.py"], shell=True)
/// ```
///
/// Use instead:
/// ```python
/// import subprocess
///
/// subprocess.Popen(["chmod", "777", "main.py"])
/// subprocess.Popen(["chmod", "777", "main.py"], shell=True)
/// ```
///
/// ## References

View file

@ -20,16 +20,22 @@ use crate::{FixAvailability, Violation};
///
/// ## Example
/// ```python
/// import itertools
///
/// itertools.batched(iterable, n)
/// ```
///
/// Use instead if the batches must be of uniform length:
/// ```python
/// import itertools
///
/// itertools.batched(iterable, n, strict=True)
/// ```
///
/// Or if the batches can be of non-uniform length:
/// ```python
/// import itertools
///
/// itertools.batched(iterable, n, strict=False)
/// ```
///

View file

@ -20,11 +20,15 @@ use crate::{checkers::ast::Checker, fix::edits::add_argument};
///
/// ## Example
/// ```python
/// import warnings
///
/// warnings.warn("This is a warning")
/// ```
///
/// Use instead:
/// ```python
/// import warnings
///
/// warnings.warn("This is a warning", stacklevel=2)
/// ```
///

View file

@ -24,6 +24,7 @@ mod tests {
#[test_case(Rule::UnnecessaryComprehensionInCall, Path::new("C419_2.py"))]
#[test_case(Rule::UnnecessaryDictComprehensionForIterable, Path::new("C420.py"))]
#[test_case(Rule::UnnecessaryDictComprehensionForIterable, Path::new("C420_1.py"))]
#[test_case(Rule::UnnecessaryDictComprehensionForIterable, Path::new("C420_2.py"))]
#[test_case(Rule::UnnecessaryDoubleCastOrProcess, Path::new("C414.py"))]
#[test_case(Rule::UnnecessaryGeneratorDict, Path::new("C402.py"))]
#[test_case(Rule::UnnecessaryGeneratorList, Path::new("C400.py"))]

View file

@ -7,6 +7,7 @@ use ruff_python_ast::{self as ast, Arguments, Comprehension, Expr, ExprCall, Exp
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::edits::pad_start;
use crate::{Edit, Fix, FixAvailability, Violation};
/// ## What it does
@ -136,12 +137,16 @@ pub(crate) fn unnecessary_dict_comprehension_for_iterable(
if checker.semantic().has_builtin_binding("dict") {
let edit = Edit::range_replacement(
checker
.generator()
.expr(&fix_unnecessary_dict_comprehension(
dict_comp.value.as_ref(),
generator,
)),
pad_start(
checker
.generator()
.expr(&fix_unnecessary_dict_comprehension(
dict_comp.value.as_ref(),
generator,
)),
dict_comp.start(),
checker.locator(),
),
dict_comp.range(),
);
diagnostic.set_fix(Fix::applicable_edit(

View file

@ -0,0 +1,16 @@
---
source: crates/ruff_linter/src/rules/flake8_comprehensions/mod.rs
---
C420_2.py:1:7: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
1 | foo or{x: None for x in bar}
| ^^^^^^^^^^^^^^^^^^^^^^ C420
|
= help: Replace with `dict.fromkeys(iterable, value)`)
Safe fix
1 |-foo or{x: None for x in bar}
1 |+foo or dict.fromkeys(bar)
2 2 |
3 3 |
4 4 | # C420 fix must make sure to insert a leading space if needed,

View file

@ -24,7 +24,7 @@ use crate::checkers::ast::Checker;
/// ```python
/// import datetime
///
/// datetime.datetime.today()
/// datetime.date.today()
/// ```
///
/// Use instead:

View file

@ -18,19 +18,25 @@ use crate::checkers::ast::Checker;
/// unexpectedly, as in:
///
/// ```python
/// import datetime
///
/// # Timezone: UTC-14
/// datetime.min.timestamp() # ValueError: year 0 is out of range
/// datetime.max.timestamp() # ValueError: year 10000 is out of range
/// datetime.datetime.min.timestamp() # ValueError: year 0 is out of range
/// datetime.datetime.max.timestamp() # ValueError: year 10000 is out of range
/// ```
///
/// ## Example
/// ```python
/// datetime.max
/// import datetime
///
/// datetime.datetime.max
/// ```
///
/// Use instead:
/// ```python
/// datetime.max.replace(tzinfo=datetime.UTC)
/// import datetime
///
/// datetime.datetime.max.replace(tzinfo=datetime.UTC)
/// ```
#[derive(ViolationMetadata)]
pub(crate) struct DatetimeMinMax {

View file

@ -22,6 +22,8 @@ mod tests {
#[test_case(Path::new("EXE002_3.py"))]
#[test_case(Path::new("EXE003.py"))]
#[test_case(Path::new("EXE003_uv.py"))]
#[test_case(Path::new("EXE003_uv_tool.py"))]
#[test_case(Path::new("EXE003_uvx.py"))]
#[test_case(Path::new("EXE004_1.py"))]
#[test_case(Path::new("EXE004_2.py"))]
#[test_case(Path::new("EXE004_3.py"))]

View file

@ -47,7 +47,12 @@ pub(crate) fn shebang_missing_python(
shebang: &ShebangDirective,
context: &LintContext,
) {
if shebang.contains("python") || shebang.contains("pytest") || shebang.contains("uv run") {
if shebang.contains("python")
|| shebang.contains("pytest")
|| shebang.contains("uv run")
|| shebang.contains("uvx")
|| shebang.contains("uv tool run")
{
return;
}

View file

@ -0,0 +1,4 @@
---
source: crates/ruff_linter/src/rules/flake8_executable/mod.rs
---

View file

@ -0,0 +1,4 @@
---
source: crates/ruff_linter/src/rules/flake8_executable/mod.rs
---

View file

@ -11,6 +11,7 @@ mod tests {
use crate::registry::Rule;
use crate::rules::pep8_naming;
use crate::settings::types::PreviewMode;
use crate::test::test_path;
use crate::{assert_diagnostics, settings};
@ -172,4 +173,23 @@ mod tests {
assert_diagnostics!(snapshot, diagnostics);
Ok(())
}
#[test_case(Rule::DuplicateUnionMember, Path::new("PYI016.py"))]
#[test_case(Rule::DuplicateUnionMember, Path::new("PYI016.pyi"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",
rule_code.noqa_code(),
path.to_string_lossy()
);
let diagnostics = test_path(
Path::new("flake8_pyi").join(path).as_path(),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rule(rule_code)
},
)?;
assert_diagnostics!(snapshot, diagnostics);
Ok(())
}
}

View file

@ -28,8 +28,10 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
/// ## Example
///
/// ```pyi
/// from typing import Any
///
/// class Foo:
/// def __eq__(self, obj: typing.Any) -> bool: ...
/// def __eq__(self, obj: Any) -> bool: ...
/// ```
///
/// Use instead:

View file

@ -19,11 +19,15 @@ use crate::{AlwaysFixableViolation, Applicability, Edit, Fix};
///
/// ## Example
/// ```python
/// from typing import Literal
///
/// foo: Literal["a", "b", "a"]
/// ```
///
/// Use instead:
/// ```python
/// from typing import Literal
///
/// foo: Literal["a", "b"]
/// ```
///

View file

@ -1,17 +1,16 @@
use std::collections::HashSet;
use rustc_hash::FxHashSet;
use std::collections::HashSet;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::{Expr, ExprBinOp, Operator, PythonVersion};
use ruff_python_semantic::analyze::typing::traverse_union;
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::{Applicability, Edit, Fix, FixAvailability, Violation};
use ruff_python_ast::{AtomicNodeIndex, Expr, ExprBinOp, ExprNoneLiteral, Operator, PythonVersion};
use ruff_python_semantic::analyze::typing::{traverse_union, traverse_union_and_optional};
use ruff_text_size::{Ranged, TextRange, TextSize};
use super::generate_union_fix;
use crate::checkers::ast::Checker;
use crate::preview::is_optional_as_none_in_union_enabled;
use crate::{Applicability, Edit, Fix, FixAvailability, Violation};
/// ## What it does
/// Checks for duplicate union members.
@ -71,21 +70,35 @@ pub(crate) fn duplicate_union_member<'a>(checker: &Checker, expr: &'a Expr) {
union_type = UnionKind::PEP604;
}
let virtual_expr = if is_optional_as_none_in_union_enabled(checker.settings())
&& is_optional_type(checker, expr)
{
// If the union member is an `Optional`, add a virtual `None` literal.
&VIRTUAL_NONE_LITERAL
} else {
expr
};
// If we've already seen this union member, raise a violation.
if seen_nodes.insert(expr.into()) {
unique_nodes.push(expr);
if seen_nodes.insert(virtual_expr.into()) {
unique_nodes.push(virtual_expr);
} else {
diagnostics.push(checker.report_diagnostic(
DuplicateUnionMember {
duplicate_name: checker.generator().expr(expr),
duplicate_name: checker.generator().expr(virtual_expr),
},
// Use the real expression's range for diagnostics,
expr.range(),
));
}
};
// Traverse the union, collect all diagnostic members
traverse_union(&mut check_for_duplicate_members, checker.semantic(), expr);
if is_optional_as_none_in_union_enabled(checker.settings()) {
traverse_union_and_optional(&mut check_for_duplicate_members, checker.semantic(), expr);
} else {
traverse_union(&mut check_for_duplicate_members, checker.semantic(), expr);
}
if diagnostics.is_empty() {
return;
@ -178,3 +191,12 @@ fn generate_pep604_fix(
applicability,
)
}
static VIRTUAL_NONE_LITERAL: Expr = Expr::NoneLiteral(ExprNoneLiteral {
node_index: AtomicNodeIndex::dummy(),
range: TextRange::new(TextSize::new(0), TextSize::new(0)),
});
fn is_optional_type(checker: &Checker, expr: &Expr) -> bool {
checker.semantic().match_typing_expr(expr, "Optional")
}

View file

@ -914,4 +914,79 @@ PYI016.py:115:23: PYI016 [*] Duplicate union member `int`
115 |+field35: "int | str" # Error
116 116 |
117 117 |
118 118 |
118 118 |
PYI016.py:134:45: PYI016 [*] Duplicate union member `typing.Optional[int]`
|
132 | field40: typing.Union[typing.Optional[int], None]
133 | field41: typing.Optional[typing.Union[int, None]]
134 | field42: typing.Union[typing.Optional[int], typing.Optional[int]]
| ^^^^^^^^^^^^^^^^^^^^ PYI016
135 | field43: typing.Optional[int] | None
136 | field44: typing.Optional[int | None]
|
= help: Remove duplicate union member `typing.Optional[int]`
Safe fix
131 131 | # equivalent to int | None
132 132 | field40: typing.Union[typing.Optional[int], None]
133 133 | field41: typing.Optional[typing.Union[int, None]]
134 |-field42: typing.Union[typing.Optional[int], typing.Optional[int]]
134 |+field42: typing.Optional[int]
135 135 | field43: typing.Optional[int] | None
136 136 | field44: typing.Optional[int | None]
137 137 | field45: typing.Optional[int] | typing.Optional[int]
PYI016.py:137:33: PYI016 [*] Duplicate union member `typing.Optional[int]`
|
135 | field43: typing.Optional[int] | None
136 | field44: typing.Optional[int | None]
137 | field45: typing.Optional[int] | typing.Optional[int]
| ^^^^^^^^^^^^^^^^^^^^ PYI016
138 | # equivalent to int | dict | None
139 | field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
|
= help: Remove duplicate union member `typing.Optional[int]`
Safe fix
134 134 | field42: typing.Union[typing.Optional[int], typing.Optional[int]]
135 135 | field43: typing.Optional[int] | None
136 136 | field44: typing.Optional[int | None]
137 |-field45: typing.Optional[int] | typing.Optional[int]
137 |+field45: typing.Optional[int]
138 138 | # equivalent to int | dict | None
139 139 | field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
140 140 | field47: typing.Optional[int] | typing.Optional[dict]
PYI016.py:143:61: PYI016 [*] Duplicate union member `complex`
|
142 | # avoid reporting twice
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
| ^^^^^^^ PYI016
144 | field49: typing.Optional[complex | complex] | complex
|
= help: Remove duplicate union member `complex`
Safe fix
140 140 | field47: typing.Optional[int] | typing.Optional[dict]
141 141 |
142 142 | # avoid reporting twice
143 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
143 |+field48: typing.Union[typing.Optional[complex], complex]
144 144 | field49: typing.Optional[complex | complex] | complex
PYI016.py:144:36: PYI016 [*] Duplicate union member `complex`
|
142 | # avoid reporting twice
143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
144 | field49: typing.Optional[complex | complex] | complex
| ^^^^^^^ PYI016
|
= help: Remove duplicate union member `complex`
Safe fix
141 141 |
142 142 | # avoid reporting twice
143 143 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
144 |-field49: typing.Optional[complex | complex] | complex
144 |+field49: typing.Optional[complex] | complex

View file

@ -1,6 +1,5 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
snapshot_kind: text
---
PYI016.pyi:7:15: PYI016 [*] Duplicate union member `str`
|
@ -883,6 +882,8 @@ PYI016.pyi:113:61: PYI016 [*] Duplicate union member `list[int]`
112 | # Test case for mixed union type
113 | field34: typing.Union[list[int], str] | typing.Union[bytes, list[int]] # Error
| ^^^^^^^^^ PYI016
114 |
115 | # https://github.com/astral-sh/ruff/issues/18546
|
= help: Remove duplicate union member `list[int]`
@ -892,3 +893,81 @@ PYI016.pyi:113:61: PYI016 [*] Duplicate union member `list[int]`
112 112 | # Test case for mixed union type
113 |-field34: typing.Union[list[int], str] | typing.Union[bytes, list[int]] # Error
113 |+field34: typing.Union[list[int], str, bytes] # Error
114 114 |
115 115 | # https://github.com/astral-sh/ruff/issues/18546
116 116 | # Expand Optional[T] to Union[T, None]
PYI016.pyi:125:45: PYI016 [*] Duplicate union member `typing.Optional[int]`
|
123 | field40: typing.Union[typing.Optional[int], None]
124 | field41: typing.Optional[typing.Union[int, None]]
125 | field42: typing.Union[typing.Optional[int], typing.Optional[int]]
| ^^^^^^^^^^^^^^^^^^^^ PYI016
126 | field43: typing.Optional[int] | None
127 | field44: typing.Optional[int | None]
|
= help: Remove duplicate union member `typing.Optional[int]`
Safe fix
122 122 | # equivalent to int | None
123 123 | field40: typing.Union[typing.Optional[int], None]
124 124 | field41: typing.Optional[typing.Union[int, None]]
125 |-field42: typing.Union[typing.Optional[int], typing.Optional[int]]
125 |+field42: typing.Optional[int]
126 126 | field43: typing.Optional[int] | None
127 127 | field44: typing.Optional[int | None]
128 128 | field45: typing.Optional[int] | typing.Optional[int]
PYI016.pyi:128:33: PYI016 [*] Duplicate union member `typing.Optional[int]`
|
126 | field43: typing.Optional[int] | None
127 | field44: typing.Optional[int | None]
128 | field45: typing.Optional[int] | typing.Optional[int]
| ^^^^^^^^^^^^^^^^^^^^ PYI016
129 | # equivalent to int | dict | None
130 | field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
|
= help: Remove duplicate union member `typing.Optional[int]`
Safe fix
125 125 | field42: typing.Union[typing.Optional[int], typing.Optional[int]]
126 126 | field43: typing.Optional[int] | None
127 127 | field44: typing.Optional[int | None]
128 |-field45: typing.Optional[int] | typing.Optional[int]
128 |+field45: typing.Optional[int]
129 129 | # equivalent to int | dict | None
130 130 | field46: typing.Union[typing.Optional[int], typing.Optional[dict]]
131 131 | field47: typing.Optional[int] | typing.Optional[dict]
PYI016.pyi:134:61: PYI016 [*] Duplicate union member `complex`
|
133 | # avoid reporting twice
134 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
| ^^^^^^^ PYI016
135 | field49: typing.Optional[complex | complex] | complex
|
= help: Remove duplicate union member `complex`
Safe fix
131 131 | field47: typing.Optional[int] | typing.Optional[dict]
132 132 |
133 133 | # avoid reporting twice
134 |-field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
134 |+field48: typing.Union[typing.Optional[complex], complex]
135 135 | field49: typing.Optional[complex | complex] | complex
PYI016.pyi:135:36: PYI016 [*] Duplicate union member `complex`
|
133 | # avoid reporting twice
134 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
135 | field49: typing.Optional[complex | complex] | complex
| ^^^^^^^ PYI016
|
= help: Remove duplicate union member `complex`
Safe fix
132 132 |
133 133 | # avoid reporting twice
134 134 | field48: typing.Union[typing.Optional[typing.Union[complex, complex]], complex]
135 |-field49: typing.Optional[complex | complex] | complex
135 |+field49: typing.Optional[complex] | complex

View file

@ -462,12 +462,16 @@ fn has_post_loop_references(checker: &Checker, expr: &Expr, loop_end: TextSize)
.iter()
.any(|expr| has_post_loop_references(checker, expr, loop_end)),
Expr::Name(name) => {
let target_binding = checker
let Some(target_binding) = checker
.semantic()
.bindings
.iter()
.find(|binding| name.range() == binding.range)
.expect("for-loop target binding must exist");
else {
// no binding in for statement => err on the safe side and make the checker skip
// e.g., `for foo[0] in bar:` or `for foo.bar in baz:`
return true;
};
target_binding
.references()

View file

@ -374,3 +374,6 @@ PERF403.py:172:9: PERF403 [*] Use a dictionary comprehension instead of a for-lo
171 |- for o,(x,)in():
172 |- v[x,]=o
170 |+ v = {x: o for o,(x,) in ()}
173 171 |
174 172 |
175 173 | # https://github.com/astral-sh/ruff/issues/19005

View file

@ -1717,7 +1717,7 @@ fn common_section(
format!(
"{}{}",
line_end.repeat(2 - num_blank_lines),
docstring.compute_indentation()
leading_space(docstring.compute_indentation())
),
context.end() - del_len,
context.end(),

View file

@ -79,5 +79,27 @@ D413.py:69:5: D413 [*] Missing blank line after last section ("Returns")
69 69 | Returns:
70 70 | the value
71 |- """
71 |+
72 71 |
72 |+ """
73 |+
73 74 |
74 75 | def func():
75 76 | ("""Docstring.
D413.py:77:5: D413 [*] Missing blank line after last section ("Raises")
|
75 | ("""Docstring.
76 |
77 | Raises:
| ^^^^^^ D413
78 | ValueError: An error.
79 | """)
|
= help: Add blank line after "Raises"
Safe fix
76 76 |
77 77 | Raises:
78 78 | ValueError: An error.
79 |+
79 80 | """)

View file

@ -774,9 +774,10 @@ mod tests {
messages.sort_by_key(Ranged::start);
let actual = messages
.iter()
.filter_map(OldDiagnostic::noqa_code)
.filter(|msg| !msg.is_syntax_error())
.map(OldDiagnostic::name)
.collect::<Vec<_>>();
let expected: Vec<_> = expected.iter().map(Rule::noqa_code).collect();
let expected: Vec<_> = expected.iter().map(|rule| rule.name().as_str()).collect();
assert_eq!(actual, expected);
}

View file

@ -15,7 +15,7 @@ mod tests {
use crate::registry::Rule;
use crate::rules::pyupgrade;
use crate::settings::types::PreviewMode;
use crate::test::test_path;
use crate::{assert_diagnostics, settings};
@ -122,6 +122,20 @@ mod tests {
Ok(())
}
#[test_case(Rule::SuperCallWithParameters, Path::new("UP008.py"))]
fn rules_preview(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}__preview", path.to_string_lossy());
let diagnostics = test_path(
Path::new("pyupgrade").join(path).as_path(),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rule(rule_code)
},
)?;
assert_diagnostics!(snapshot, diagnostics);
Ok(())
}
#[test]
fn async_timeout_error_alias_not_applied_py310() -> Result<()> {
let diagnostics = test_path(

View file

@ -1,8 +1,10 @@
use ruff_diagnostics::Applicability;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_text_size::{Ranged, TextSize};
use crate::checkers::ast::Checker;
use crate::preview::is_safe_super_call_with_parameters_fix_enabled;
use crate::{AlwaysFixableViolation, Edit, Fix};
/// ## What it does
@ -45,6 +47,10 @@ use crate::{AlwaysFixableViolation, Edit, Fix};
/// This rule's fix is marked as unsafe because removing the arguments from a call
/// may delete comments that are attached to the arguments.
///
/// In [preview], the fix is marked safe if no comments are present.
///
/// [preview]: https://docs.astral.sh/ruff/preview/
///
/// ## References
/// - [Python documentation: `super`](https://docs.python.org/3/library/functions.html#super)
/// - [super/MRO, Python's most misunderstood feature.](https://www.youtube.com/watch?v=X1PQ7zzltz4)
@ -159,11 +165,22 @@ pub(crate) fn super_call_with_parameters(checker: &Checker, call: &ast::ExprCall
return;
}
let applicability = if !checker.comment_ranges().intersects(call.arguments.range())
&& is_safe_super_call_with_parameters_fix_enabled(checker.settings())
{
Applicability::Safe
} else {
Applicability::Unsafe
};
let mut diagnostic = checker.report_diagnostic(SuperCallWithParameters, call.arguments.range());
diagnostic.set_fix(Fix::unsafe_edit(Edit::deletion(
call.arguments.start() + TextSize::new(1),
call.arguments.end() - TextSize::new(1),
)));
diagnostic.set_fix(Fix::applicable_edit(
Edit::deletion(
call.arguments.start() + TextSize::new(1),
call.arguments.end() - TextSize::new(1),
),
applicability,
));
}
/// Returns `true` if a call is an argumented `super` invocation.

View file

@ -42,9 +42,28 @@ impl AlwaysFixableViolation for UnicodeKindPrefix {
pub(crate) fn unicode_kind_prefix(checker: &Checker, string: &StringLiteral) {
if string.flags.prefix().is_unicode() {
let mut diagnostic = checker.report_diagnostic(UnicodeKindPrefix, string.range);
diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(TextRange::at(
string.start(),
TextSize::from(1),
))));
let prefix_range = TextRange::at(string.start(), TextSize::new(1));
let locator = checker.locator();
let content = locator
.slice(TextRange::new(prefix_range.end(), string.end()))
.to_owned();
// If the preceding character is equivalent to the quote character, insert a space to avoid a
// syntax error. For example, when removing the `u` prefix in `""u""`, rewrite to `"" ""`
// instead of `""""`.
// see https://github.com/astral-sh/ruff/issues/18895
let edit = if locator
.slice(TextRange::up_to(prefix_range.start()))
.chars()
.last()
.is_some_and(|char| content.starts_with(char))
{
Edit::range_replacement(" ".to_string(), prefix_range)
} else {
Edit::range_deletion(prefix_range)
};
diagnostic.set_fix(Fix::safe_edit(edit));
}
}

View file

@ -132,6 +132,17 @@ pub(crate) fn non_pep604_annotation(
slice: &Expr,
operator: Pep604Operator,
) {
// `NamedTuple` is not a type; it's a type constructor. Using it in a type annotation doesn't
// make much sense. But since type checkers will currently (incorrectly) _not_ complain about it
// being used in a type annotation, we just ignore `Optional[typing.NamedTuple]` and
// `Union[...]` containing `NamedTuple`.
// <https://github.com/astral-sh/ruff/issues/18619>
if is_optional_named_tuple(checker, operator, slice)
|| is_union_with_named_tuple(checker, operator, slice)
{
return;
}
// Avoid fixing forward references, types not in an annotation, and expressions that would
// lead to invalid syntax.
let fixable = checker.semantic().in_type_definition()
@ -273,6 +284,25 @@ fn is_allowed_value(expr: &Expr) -> bool {
}
}
/// Return `true` if this is an `Optional[typing.NamedTuple]` annotation.
fn is_optional_named_tuple(checker: &Checker, operator: Pep604Operator, slice: &Expr) -> bool {
matches!(operator, Pep604Operator::Optional) && is_named_tuple(checker, slice)
}
/// Return `true` if this is a `Union[...]` annotation containing `typing.NamedTuple`.
fn is_union_with_named_tuple(checker: &Checker, operator: Pep604Operator, slice: &Expr) -> bool {
matches!(operator, Pep604Operator::Union)
&& (is_named_tuple(checker, slice)
|| slice
.as_tuple_expr()
.is_some_and(|tuple| tuple.elts.iter().any(|elt| is_named_tuple(checker, elt))))
}
/// Return `true` if this is a `typing.NamedTuple` annotation.
fn is_named_tuple(checker: &Checker, expr: &Expr) -> bool {
checker.semantic().match_typing_expr(expr, "NamedTuple")
}
/// Return `true` if this is an `Optional[None]` annotation.
fn is_optional_none(operator: Pep604Operator, slice: &Expr) -> bool {
matches!(operator, Pep604Operator::Optional) && matches!(slice, Expr::NoneLiteral(_))

View file

@ -314,3 +314,5 @@ UP007.py:91:26: UP007 [*] Use `X | Y` for type annotations
91 |-def myfunc(param: "tuple[Union[int, 'AClass', None], str]"):
91 |+def myfunc(param: "tuple[int | 'AClass' | None, str]"):
92 92 | print(param)
93 93 |
94 94 |

View file

@ -161,3 +161,91 @@ UP008.py:107:23: UP008 [*] Use `super()` instead of `super(__class__, self)`
106 106 | def f(self):
107 |- builtins.super(C, self)
107 |+ builtins.super()
108 108 |
109 109 |
110 110 | # see: https://github.com/astral-sh/ruff/issues/18533
UP008.py:113:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
111 | class ClassForCommentEnthusiasts(BaseClass):
112 | def with_comments(self):
113 | super(
| ______________^
114 | | # super helpful comment
115 | | ClassForCommentEnthusiasts,
116 | | self
117 | | ).f()
| |_________^ UP008
118 | super(
119 | ClassForCommentEnthusiasts,
|
= help: Remove `super()` parameters
Unsafe fix
110 110 | # see: https://github.com/astral-sh/ruff/issues/18533
111 111 | class ClassForCommentEnthusiasts(BaseClass):
112 112 | def with_comments(self):
113 |- super(
114 |- # super helpful comment
115 |- ClassForCommentEnthusiasts,
116 |- self
117 |- ).f()
113 |+ super().f()
118 114 | super(
119 115 | ClassForCommentEnthusiasts,
120 116 | # even more helpful comment
UP008.py:118:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
116 | self
117 | ).f()
118 | super(
| ______________^
119 | | ClassForCommentEnthusiasts,
120 | | # even more helpful comment
121 | | self
122 | | ).f()
| |_________^ UP008
123 | super(
124 | ClassForCommentEnthusiasts,
|
= help: Remove `super()` parameters
Unsafe fix
115 115 | ClassForCommentEnthusiasts,
116 116 | self
117 117 | ).f()
118 |- super(
119 |- ClassForCommentEnthusiasts,
120 |- # even more helpful comment
121 |- self
122 |- ).f()
118 |+ super().f()
123 119 | super(
124 120 | ClassForCommentEnthusiasts,
125 121 | self
UP008.py:123:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
121 | self
122 | ).f()
123 | super(
| ______________^
124 | | ClassForCommentEnthusiasts,
125 | | self
126 | | # also a comment
127 | | ).f()
| |_________^ UP008
|
= help: Remove `super()` parameters
Unsafe fix
120 120 | # even more helpful comment
121 121 | self
122 122 | ).f()
123 |- super(
124 |- ClassForCommentEnthusiasts,
125 |- self
126 |- # also a comment
127 |- ).f()
123 |+ super().f()

View file

@ -0,0 +1,251 @@
---
source: crates/ruff_linter/src/rules/pyupgrade/mod.rs
---
UP008.py:17:23: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
16 | def wrong(self):
17 | parent = super(Child, self) # wrong
| ^^^^^^^^^^^^^ UP008
18 | super(Child, self).method # wrong
19 | super(
|
= help: Remove `super()` parameters
Safe fix
14 14 | Parent.super(1, 2) # ok
15 15 |
16 16 | def wrong(self):
17 |- parent = super(Child, self) # wrong
17 |+ parent = super() # wrong
18 18 | super(Child, self).method # wrong
19 19 | super(
20 20 | Child,
UP008.py:18:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
16 | def wrong(self):
17 | parent = super(Child, self) # wrong
18 | super(Child, self).method # wrong
| ^^^^^^^^^^^^^ UP008
19 | super(
20 | Child,
|
= help: Remove `super()` parameters
Safe fix
15 15 |
16 16 | def wrong(self):
17 17 | parent = super(Child, self) # wrong
18 |- super(Child, self).method # wrong
18 |+ super().method # wrong
19 19 | super(
20 20 | Child,
21 21 | self,
UP008.py:19:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
17 | parent = super(Child, self) # wrong
18 | super(Child, self).method # wrong
19 | super(
| ______________^
20 | | Child,
21 | | self,
22 | | ).method() # wrong
| |_________^ UP008
|
= help: Remove `super()` parameters
Safe fix
16 16 | def wrong(self):
17 17 | parent = super(Child, self) # wrong
18 18 | super(Child, self).method # wrong
19 |- super(
20 |- Child,
21 |- self,
22 |- ).method() # wrong
19 |+ super().method() # wrong
23 20 |
24 21 |
25 22 | class BaseClass:
UP008.py:36:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
34 | class MyClass(BaseClass):
35 | def normal(self):
36 | super(MyClass, self).f() # can use super()
| ^^^^^^^^^^^^^^^ UP008
37 | super().f()
|
= help: Remove `super()` parameters
Safe fix
33 33 |
34 34 | class MyClass(BaseClass):
35 35 | def normal(self):
36 |- super(MyClass, self).f() # can use super()
36 |+ super().f() # can use super()
37 37 | super().f()
38 38 |
39 39 | def different_argument(self, other):
UP008.py:50:18: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
49 | def inner_argument(self):
50 | super(MyClass, self).f() # can use super()
| ^^^^^^^^^^^^^^^ UP008
51 | super().f()
|
= help: Remove `super()` parameters
Safe fix
47 47 | super(MyClass, self).f() # CANNOT use super()
48 48 |
49 49 | def inner_argument(self):
50 |- super(MyClass, self).f() # can use super()
50 |+ super().f() # can use super()
51 51 | super().f()
52 52 |
53 53 | outer_argument()
UP008.py:74:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
72 | class DataClass:
73 | def normal(self):
74 | super(DataClass, self).f() # Error
| ^^^^^^^^^^^^^^^^^ UP008
75 | super().f() # OK
|
= help: Remove `super()` parameters
Safe fix
71 71 | @dataclass
72 72 | class DataClass:
73 73 | def normal(self):
74 |- super(DataClass, self).f() # Error
74 |+ super().f() # Error
75 75 | super().f() # OK
76 76 |
77 77 |
UP008.py:92:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
90 | class B(A):
91 | def bar(self):
92 | super(__class__, self).foo()
| ^^^^^^^^^^^^^^^^^ UP008
|
= help: Remove `super()` parameters
Safe fix
89 89 |
90 90 | class B(A):
91 91 | def bar(self):
92 |- super(__class__, self).foo()
92 |+ super().foo()
93 93 |
94 94 |
95 95 | # see: https://github.com/astral-sh/ruff/issues/18684
UP008.py:107:23: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
105 | class C:
106 | def f(self):
107 | builtins.super(C, self)
| ^^^^^^^^^ UP008
|
= help: Remove `super()` parameters
Safe fix
104 104 |
105 105 | class C:
106 106 | def f(self):
107 |- builtins.super(C, self)
107 |+ builtins.super()
108 108 |
109 109 |
110 110 | # see: https://github.com/astral-sh/ruff/issues/18533
UP008.py:113:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
111 | class ClassForCommentEnthusiasts(BaseClass):
112 | def with_comments(self):
113 | super(
| ______________^
114 | | # super helpful comment
115 | | ClassForCommentEnthusiasts,
116 | | self
117 | | ).f()
| |_________^ UP008
118 | super(
119 | ClassForCommentEnthusiasts,
|
= help: Remove `super()` parameters
Unsafe fix
110 110 | # see: https://github.com/astral-sh/ruff/issues/18533
111 111 | class ClassForCommentEnthusiasts(BaseClass):
112 112 | def with_comments(self):
113 |- super(
114 |- # super helpful comment
115 |- ClassForCommentEnthusiasts,
116 |- self
117 |- ).f()
113 |+ super().f()
118 114 | super(
119 115 | ClassForCommentEnthusiasts,
120 116 | # even more helpful comment
UP008.py:118:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
116 | self
117 | ).f()
118 | super(
| ______________^
119 | | ClassForCommentEnthusiasts,
120 | | # even more helpful comment
121 | | self
122 | | ).f()
| |_________^ UP008
123 | super(
124 | ClassForCommentEnthusiasts,
|
= help: Remove `super()` parameters
Unsafe fix
115 115 | ClassForCommentEnthusiasts,
116 116 | self
117 117 | ).f()
118 |- super(
119 |- ClassForCommentEnthusiasts,
120 |- # even more helpful comment
121 |- self
122 |- ).f()
118 |+ super().f()
123 119 | super(
124 120 | ClassForCommentEnthusiasts,
125 121 | self
UP008.py:123:14: UP008 [*] Use `super()` instead of `super(__class__, self)`
|
121 | self
122 | ).f()
123 | super(
| ______________^
124 | | ClassForCommentEnthusiasts,
125 | | self
126 | | # also a comment
127 | | ).f()
| |_________^ UP008
|
= help: Remove `super()` parameters
Unsafe fix
120 120 | # even more helpful comment
121 121 | self
122 122 | ).f()
123 |- super(
124 |- ClassForCommentEnthusiasts,
125 |- self
126 |- # also a comment
127 |- ).f()
123 |+ super().f()

View file

@ -281,14 +281,18 @@ UP025.py:27:7: UP025 [*] Remove unicode literals from strings
25 25 | return"Hello" # OK
26 26 |
27 |-f"foo"u"bar" # OK
27 |+f"foo""bar" # OK
27 |+f"foo" "bar" # OK
28 28 | f"foo" u"bar" # OK
29 29 |
30 30 | # https://github.com/astral-sh/ruff/issues/18895
UP025.py:28:8: UP025 [*] Remove unicode literals from strings
|
27 | f"foo"u"bar" # OK
28 | f"foo" u"bar" # OK
| ^^^^^^ UP025
29 |
30 | # https://github.com/astral-sh/ruff/issues/18895
|
= help: Remove unicode prefix
@ -298,3 +302,80 @@ UP025.py:28:8: UP025 [*] Remove unicode literals from strings
27 27 | f"foo"u"bar" # OK
28 |-f"foo" u"bar" # OK
28 |+f"foo" "bar" # OK
29 29 |
30 30 | # https://github.com/astral-sh/ruff/issues/18895
31 31 | ""u""
UP025.py:31:3: UP025 [*] Remove unicode literals from strings
|
30 | # https://github.com/astral-sh/ruff/issues/18895
31 | ""u""
| ^^^ UP025
32 | ""u"hi"
33 | """"""""""""""""""""u"hi"
|
= help: Remove unicode prefix
Safe fix
28 28 | f"foo" u"bar" # OK
29 29 |
30 30 | # https://github.com/astral-sh/ruff/issues/18895
31 |-""u""
31 |+"" ""
32 32 | ""u"hi"
33 33 | """"""""""""""""""""u"hi"
34 34 | ""U"helloooo"
UP025.py:32:3: UP025 [*] Remove unicode literals from strings
|
30 | # https://github.com/astral-sh/ruff/issues/18895
31 | ""u""
32 | ""u"hi"
| ^^^^^ UP025
33 | """"""""""""""""""""u"hi"
34 | ""U"helloooo"
|
= help: Remove unicode prefix
Safe fix
29 29 |
30 30 | # https://github.com/astral-sh/ruff/issues/18895
31 31 | ""u""
32 |-""u"hi"
32 |+"" "hi"
33 33 | """"""""""""""""""""u"hi"
34 34 | ""U"helloooo"
UP025.py:33:21: UP025 [*] Remove unicode literals from strings
|
31 | ""u""
32 | ""u"hi"
33 | """"""""""""""""""""u"hi"
| ^^^^^ UP025
34 | ""U"helloooo"
|
= help: Remove unicode prefix
Safe fix
30 30 | # https://github.com/astral-sh/ruff/issues/18895
31 31 | ""u""
32 32 | ""u"hi"
33 |-""""""""""""""""""""u"hi"
33 |+"""""""""""""""""""" "hi"
34 34 | ""U"helloooo"
UP025.py:34:3: UP025 [*] Remove unicode literals from strings
|
32 | ""u"hi"
33 | """"""""""""""""""""u"hi"
34 | ""U"helloooo"
| ^^^^^^^^^^^ UP025
|
= help: Remove unicode prefix
Safe fix
31 31 | ""u""
32 32 | ""u"hi"
33 33 | """"""""""""""""""""u"hi"
34 |-""U"helloooo"
34 |+"" "helloooo"

View file

@ -100,7 +100,9 @@ fn is_none(expr: &Expr, semantic: &SemanticModel) -> bool {
}
// Ex) `(type(None),)`
Expr::Tuple(tuple) => tuple.iter().all(|element| inner(element, false, semantic)),
Expr::Tuple(tuple) => {
!tuple.is_empty() && tuple.iter().all(|element| inner(element, false, semantic))
}
// Ex) `type(None) | type(None)`
Expr::BinOp(ast::ExprBinOp {
@ -125,7 +127,8 @@ fn is_none(expr: &Expr, semantic: &SemanticModel) -> bool {
match slice.as_ref() {
Expr::Tuple(ast::ExprTuple { elts, .. }) => {
elts.iter().all(|element| inner(element, true, semantic))
!elts.is_empty()
&& elts.iter().all(|element| inner(element, true, semantic))
}
slice => inner(slice, true, semantic),
}

View file

@ -237,9 +237,9 @@ Source with applied fixes:
let messages = messages
.into_iter()
.filter_map(|msg| Some((msg.noqa_code()?, msg)))
.filter_map(|msg| Some((msg.secondary_code()?.to_string(), msg)))
.map(|(code, mut diagnostic)| {
let rule = Rule::from_code(&code.to_string()).unwrap();
let rule = Rule::from_code(&code).unwrap();
let fixable = diagnostic.fix().is_some_and(|fix| {
matches!(
fix.applicability(),

View file

@ -41,7 +41,10 @@ serde = [
"dep:ruff_cache",
"compact_str/serde",
]
get-size = ["dep:get-size2"]
get-size = [
"dep:get-size2",
"ruff_text_size/get-size"
]
[lints]
workspace = true

View file

@ -19,13 +19,14 @@ where
///
/// This type is interiorly mutable to allow assigning node indices
/// on-demand after parsing.
#[derive(Default, get_size2::GetSize)]
#[derive(Default)]
#[cfg_attr(feature = "get-size", derive(get_size2::GetSize))]
pub struct AtomicNodeIndex(AtomicU32);
impl AtomicNodeIndex {
/// Returns a placeholder `AtomicNodeIndex`.
pub fn dummy() -> AtomicNodeIndex {
AtomicNodeIndex(AtomicU32::from(u32::MAX))
pub const fn dummy() -> AtomicNodeIndex {
AtomicNodeIndex(AtomicU32::new(u32::MAX))
}
/// Load the current value of the `AtomicNodeIndex`.

View file

@ -875,6 +875,7 @@ bitflags! {
}
}
#[cfg(feature = "get-size")]
impl get_size2::GetSize for InterpolatedStringFlagsInner {}
/// Flags that can be queried to obtain information
@ -1462,6 +1463,7 @@ bitflags! {
}
}
#[cfg(feature = "get-size")]
impl get_size2::GetSize for StringLiteralFlagsInner {}
/// Flags that can be queried to obtain information
@ -1880,6 +1882,7 @@ bitflags! {
}
}
#[cfg(feature = "get-size")]
impl get_size2::GetSize for BytesLiteralFlagsInner {}
/// Flags that can be queried to obtain information

View file

@ -1,9 +1,9 @@
use ruff_db::{Db as SourceDb, Upcast, files::File};
use ruff_db::{Db as SourceDb, files::File};
use crate::PyFormatOptions;
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub trait Db: SourceDb {
/// Returns the formatting options
fn format_options(&self, file: File) -> PyFormatOptions;
}

View file

@ -165,14 +165,14 @@ where
pub fn formatted_file(db: &dyn Db, file: File) -> Result<Option<String>, FormatModuleError> {
let options = db.format_options(file);
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let parsed = parsed_module(db, file).load(db);
if let Some(first) = parsed.errors().first() {
return Err(FormatModuleError::ParseError(first.clone()));
}
let comment_ranges = CommentRanges::from(parsed.tokens());
let source = source_text(db.upcast(), file);
let source = source_text(db, file);
let formatted = format_node(&parsed, &comment_ranges, &source, options)?;
let printed = formatted.print()?;

Some files were not shown because too many files have changed in this diff Show more