Mirror of https://github.com/astral-sh/ruff.git, synced 2025-12-04 17:51:57 +00:00
Remove flake8-to-ruff (#9329)
## Summary

We stopped releasing this a while ago and no longer advertise it anywhere. IMO, we should remove it so that we stop paying the cost of maintaining it. If we want to revive it, we can always do so from Git.
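As a hedged sketch of the "revive it from Git" option, the tool could presumably still be built from the last revision that contained the crate. The invocation below is an assumption rather than a documented workflow; `da8a3af524` is this commit's parent.

```shell
# Assumption: install the removed flake8-to-ruff binary straight from the
# parent revision of this commit (the last revision that still contained it).
cargo install --git https://github.com/astral-sh/ruff --rev da8a3af524 flake8-to-ruff
```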
parent da8a3af524
commit 772e5d587d
22 changed files with 0 additions and 2324 deletions
247  .github/workflows/flake8-to-ruff.yaml  (vendored)
@ -1,247 +0,0 @@
name: "[flake8-to-ruff] Release"

on: workflow_dispatch

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  PACKAGE_NAME: flake8-to-ruff
  CRATE_NAME: flake8_to_ruff
  PYTHON_VERSION: "3.11"
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  macos-x86_64:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels - x86_64"
        uses: PyO3/maturin-action@v1
        with:
          target: x86_64
          args: --release --out dist --sdist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel - x86_64"
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  macos-universal:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels - universal2"
        uses: PyO3/maturin-action@v1
        with:
          args: --release --target universal2-apple-darwin --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel - universal2"
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*universal2.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  windows:
    runs-on: windows-latest
    strategy:
      matrix:
        target: [x64, x86]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: ${{ matrix.target }}
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        shell: bash
        run: |
          python -m pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [x86_64, i686]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        if: matrix.target == 'x86_64'
        run: |
          pip install dist/${{ env.CRATE_NAME }}-*.whl --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  linux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target: [aarch64, armv7, s390x, ppc64le, ppc64]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: auto
          args: --no-default-features --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - uses: uraimo/run-on-arch-action@v2
        if: matrix.target != 'ppc64'
        name: Install built wheel
        with:
          arch: ${{ matrix.target }}
          distro: ubuntu20.04
          githubToken: ${{ github.token }}
          install: |
            apt-get update
            apt-get install -y --no-install-recommends python3 python3-pip
            pip3 install -U pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  musllinux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        target:
          - x86_64-unknown-linux-musl
          - i686-unknown-linux-musl
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.target }}
          manylinux: musllinux_1_2
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - name: "Install built wheel"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@v3
        with:
          image: alpine:latest
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add py3-pip
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  musllinux-cross:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
          - target: aarch64-unknown-linux-musl
            arch: aarch64
          - target: armv7-unknown-linux-musleabihf
            arch: armv7
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: "Build wheels"
        uses: PyO3/maturin-action@v1
        with:
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_2
          args: --release --out dist -m ./${{ env.CRATE_NAME }}/Cargo.toml
      - uses: uraimo/run-on-arch-action@v2
        name: Install built wheel
        with:
          arch: ${{ matrix.platform.arch }}
          distro: alpine_latest
          githubToken: ${{ github.token }}
          install: |
            apk add py3-pip
          run: |
            pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
      - name: "Upload wheels"
        uses: actions/upload-artifact@v3
        with:
          name: wheels
          path: dist

  release:
    name: Release
    runs-on: ubuntu-latest
    needs:
      - macos-universal
      - macos-x86_64
      - windows
      - linux
      - linux-cross
      - musllinux
      - musllinux-cross
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: wheels
      - uses: actions/setup-python@v5
      - name: "Publish to PyPi"
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.FLAKE8_TO_RUFF_TOKEN }}
        run: |
          pip install --upgrade twine
          twine upload --skip-existing *
30  Cargo.lock  (generated)
@ -442,12 +442,6 @@ dependencies = [
 "windows-sys 0.48.0",
]

[[package]]
name = "configparser"
version = "3.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0e56e414a2a52ab2a104f85cd40933c2fbc278b83637facf646ecf451b49237"

[[package]]
name = "console"
version = "0.15.7"

@ -806,30 +800,6 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"

[[package]]
name = "flake8-to-ruff"
version = "0.1.9"
dependencies = [
 "anyhow",
 "clap",
 "colored",
 "configparser",
 "itertools 0.11.0",
 "log",
 "once_cell",
 "pep440_rs 0.4.0",
 "pretty_assertions",
 "regex",
 "ruff_linter",
 "ruff_workspace",
 "rustc-hash",
 "serde",
 "serde_json",
 "strum",
 "strum_macros",
 "toml",
]

[[package]]
name = "flate2"
version = "1.0.27"
@ -1,39 +0,0 @@
[package]
name = "flake8-to-ruff"
version = "0.1.9"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_linter = { path = "../ruff_linter", default-features = false }
ruff_workspace = { path = "../ruff_workspace" }

anyhow = { workspace = true }
clap = { workspace = true }
colored = { workspace = true }
configparser = { version = "3.0.3" }
itertools = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { version = "0.4.0", features = ["serde"] }
regex = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
toml = { workspace = true }

[dev-dependencies]
pretty_assertions = "1.3.0"

[lints]
workspace = true
@ -1,99 +0,0 @@
# flake8-to-ruff

Convert existing Flake8 configuration files (`setup.cfg`, `tox.ini`, or `.flake8`) for use with
[Ruff](https://github.com/astral-sh/ruff).

Generates a Ruff-compatible `pyproject.toml` section.

## Installation and Usage

### Installation

Available as [`flake8-to-ruff`](https://pypi.org/project/flake8-to-ruff/) on PyPI:

```shell
pip install flake8-to-ruff
```

### Usage

To run `flake8-to-ruff`:

```shell
flake8-to-ruff path/to/setup.cfg
flake8-to-ruff path/to/tox.ini
flake8-to-ruff path/to/.flake8
```

`flake8-to-ruff` will print the relevant `pyproject.toml` sections to standard output, like so:

```toml
[tool.ruff]
exclude = [
    '.svn',
    'CVS',
    '.bzr',
    '.hg',
    '.git',
    '__pycache__',
    '.tox',
    '.idea',
    '.mypy_cache',
    '.venv',
    'node_modules',
    '_state_machine.py',
    'test_fstring.py',
    'bad_coding2.py',
    'badsyntax_*.py',
]
select = [
    'A',
    'E',
    'F',
    'Q',
]
ignore = []

[tool.ruff.flake8-quotes]
inline-quotes = 'single'

[tool.ruff.pep8-naming]
ignore-names = [
    'foo',
    'bar',
]
```

### Plugins

`flake8-to-ruff` will attempt to infer any activated plugins based on the settings provided in your
configuration file.

For example, if your `.flake8` file includes a `docstring-convention` property, `flake8-to-ruff`
will enable the appropriate [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
checks.
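As an illustrative, hypothetical snippet (not taken from any particular project), a configuration like the following would trigger that inference and map onto Ruff's pydocstyle settings:

```ini
# Hypothetical .flake8 input: the docstring-convention key causes the
# flake8-docstrings plugin to be inferred during conversion.
[flake8]
max-line-length = 88
docstring-convention = numpy
```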
Alternatively, you can manually specify plugins on the command-line:

```shell
flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
```

## Limitations

1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
   ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
   configuration options that don't exist in Flake8.)
1. `flake8-to-ruff` will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
   codes from unsupported plugins. (See the
   [documentation](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8) for the complete
   list of supported plugins.)

## License

MIT

## Contributing

Contributions are welcome and hugely appreciated. To get started, check out the
[contributing guidelines](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md).
@ -1,65 +0,0 @@
[build-system]
requires = [
    # The minimum setuptools version is specific to the PEP 517 backend,
    # and may be stricter than the version required in `setup.cfg`
    "setuptools>=40.6.0,!=60.9.0",
    "wheel",
    # Must be kept in sync with the `install_requirements` in `setup.cfg`
    "cffi>=1.12; platform_python_implementation != 'PyPy'",
    "setuptools-rust>=0.11.4",
]
build-backend = "setuptools.build_meta"

[tool.black]
line-length = 79
target-version = ["py36"]

[tool.pytest.ini_options]
addopts = "-r s --capture=no --strict-markers --benchmark-disable"
markers = [
    "skip_fips: this test is not executed in FIPS mode",
    "supported: parametrized test requiring only_if and skip_message",
]

[tool.mypy]
show_error_codes = true
check_untyped_defs = true
no_implicit_reexport = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_unused_configs = true
strict_equality = true

[[tool.mypy.overrides]]
module = [
    "pretend"
]
ignore_missing_imports = true

[tool.coverage.run]
branch = true
relative_files = true
source = [
    "cryptography",
    "tests/",
]

[tool.coverage.paths]
source = [
    "src/cryptography",
    "*.tox/*/lib*/python*/site-packages/cryptography",
    "*.tox\\*\\Lib\\site-packages\\cryptography",
    "*.tox/pypy/site-packages/cryptography",
]
tests =[
    "tests/",
    "*tests\\",
]

[tool.coverage.report]
exclude_lines = [
    "@abc.abstractmethod",
    "@abc.abstractproperty",
    "@typing.overload",
    "if typing.TYPE_CHECKING",
]
@ -1,91 +0,0 @@
[metadata]
name = cryptography
version = attr: cryptography.__version__
description = cryptography is a package which provides cryptographic recipes and primitives to Python developers.
long_description = file: README.rst
long_description_content_type = text/x-rst
license = BSD-3-Clause OR Apache-2.0
url = https://github.com/pyca/cryptography
author = The Python Cryptographic Authority and individual contributors
author_email = cryptography-dev@python.org
project_urls =
    Documentation=https://cryptography.io/
    Source=https://github.com/pyca/cryptography/
    Issues=https://github.com/pyca/cryptography/issues
    Changelog=https://cryptography.io/en/latest/changelog/
classifiers =
    Development Status :: 5 - Production/Stable
    Intended Audience :: Developers
    License :: OSI Approved :: Apache Software License
    License :: OSI Approved :: BSD License
    Natural Language :: English
    Operating System :: MacOS :: MacOS X
    Operating System :: POSIX
    Operating System :: POSIX :: BSD
    Operating System :: POSIX :: Linux
    Operating System :: Microsoft :: Windows
    Programming Language :: Python
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3 :: Only
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7
    Programming Language :: Python :: 3.8
    Programming Language :: Python :: 3.9
    Programming Language :: Python :: 3.10
    Programming Language :: Python :: 3.11
    Programming Language :: Python :: Implementation :: CPython
    Programming Language :: Python :: Implementation :: PyPy
    Topic :: Security :: Cryptography

[options]
python_requires = >=3.6
include_package_data = True
zip_safe = False
package_dir =
    =src
packages = find:
# `install_requires` must be kept in sync with `pyproject.toml`
install_requires =
    cffi >=1.12

[options.packages.find]
where = src
exclude =
    _cffi_src
    _cffi_src.*

[options.extras_require]
test =
    pytest>=6.2.0
    pytest-benchmark
    pytest-cov
    pytest-subtests
    pytest-xdist
    pretend
    iso8601
    pytz
    hypothesis>=1.11.4,!=3.79.2
docs =
    sphinx >= 1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0
    sphinx_rtd_theme
docstest =
    pyenchant >= 1.6.11
    twine >= 1.12.0
    sphinxcontrib-spelling >= 4.0.1
sdist =
    setuptools_rust >= 0.11.4
pep8test =
    black
    flake8
    flake8-import-order
    pep8-naming
# This extra is for OpenSSH private keys that use bcrypt KDF
# Versions: v3.1.3 - ignore_few_rounds, v3.1.5 - abi3
ssh =
    bcrypt >= 3.1.5

[flake8]
ignore = E203,E211,W503,W504,N818
exclude = .tox,*.egg,.git,_build,.hypothesis
select = E,W,F,N,I
application-import-names = cryptography,cryptography_vectors,tests
@ -1,19 +0,0 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
    .cache,
    .github,
    docs,
    jupyterhub/alembic*,
    onbuild,
    scripts,
    share,
    tools,
    setup.py
@ -1,43 +0,0 @@
[flake8]
# Exclude the grpc generated code
exclude = ./manim/grpc/gen/*
max-complexity = 15
max-line-length = 88
statistics = True
# Prevents some flake8-rst-docstrings errors
rst-roles = attr,class,func,meth,mod,obj,ref,doc,exc
rst-directives = manim, SEEALSO, seealso
docstring-convention=numpy

select = A,A00,B,B9,C4,C90,D,E,F,F,PT,RST,SIM,W

# General Compatibility
extend-ignore = E203, W503, D202, D212, D213, D404

    # Misc
    F401, F403, F405, F841, E501, E731, E402, F811, F821,

    # Plug-in: flake8-builtins
    A001, A002, A003,

    # Plug-in: flake8-bugbear
    B006, B007, B008, B009, B010, B903, B950,

    # Plug-in: flake8-simplify
    SIM105, SIM106, SIM119,

    # Plug-in: flake8-comprehensions
    C901

    # Plug-in: flake8-pytest-style
    PT001, PT004, PT006, PT011, PT018, PT022, PT023,

    # Plug-in: flake8-docstrings
    D100, D101, D102, D103, D104, D105, D106, D107,
    D200, D202, D204, D205, D209,
    D301,
    D400, D401, D402, D403, D405, D406, D407, D409, D411, D412, D414,

    # Plug-in: flake8-rst-docstrings
    RST201, RST203, RST210, RST212, RST213, RST215,
    RST301, RST303,
@ -1,36 +0,0 @@
[flake8]
min_python_version = 3.7.0
max-line-length = 88
ban-relative-imports = true
# flake8-use-fstring: https://github.com/MichaelKim0407/flake8-use-fstring#--percent-greedy-and---format-greedy
format-greedy = 1
inline-quotes = double
enable-extensions = TC, TC1
type-checking-strict = true
eradicate-whitelist-extend = ^-.*;
extend-ignore =
    # E203: Whitespace before ':' (pycqa/pycodestyle#373)
    E203,
    # SIM106: Handle error-cases first
    SIM106,
    # ANN101: Missing type annotation for self in method
    ANN101,
    # ANN102: Missing type annotation for cls in classmethod
    ANN102,
    # PIE781: assign-and-return
    PIE781,
    # PIE798 no-unnecessary-class: Consider using a module for namespacing instead
    PIE798,
per-file-ignores =
    # TC002: Move third-party import '...' into a type-checking block
    __init__.py:TC002,
    # ANN201: Missing return type annotation for public function
    tests/test_*:ANN201
    tests/**/test_*:ANN201
extend-exclude =
    # Frozen and not subject to change in this repo:
    get-poetry.py,
    install-poetry.py,
    # External to the project's coding standards:
    tests/fixtures/*,
    tests/**/fixtures/*,
@ -1,19 +0,0 @@
[flake8]
max-line-length=120
docstring-convention=all
import-order-style=pycharm
application_import_names=bot,tests
exclude=.cache,.venv,.git,constants.py
extend-ignore=
    B311,W503,E226,S311,T000,E731
    # Missing Docstrings
    D100,D104,D105,D107,
    # Docstring Whitespace
    D203,D212,D214,D215,
    # Docstring Quotes
    D301,D302,
    # Docstring Content
    D400,D401,D402,D404,D405,D406,D407,D408,D409,D410,D411,D412,D413,D414,D416,D417
    # Type Annotations
    ANN002,ANN003,ANN101,ANN102,ANN204,ANN206,ANN401
per-file-ignores=tests/*:D,ANN
@ -1,6 +0,0 @@
[flake8]
ignore = E203, E501, W503
per-file-ignores =
    requests/__init__.py:E402, F401
    requests/compat.py:E402, F401
    tests/compat.py:F401
@ -1,34 +0,0 @@
[project]
name = "flake8-to-ruff"
keywords = ["automation", "flake8", "pycodestyle", "pyflakes", "pylint", "clippy"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Quality Assurance",
]
author = "Charlie Marsh"
author_email = "charlie.r.marsh@gmail.com"
description = "Convert existing Flake8 configuration to Ruff."
requires-python = ">=3.7"

[project.urls]
repository = "https://github.com/astral-sh/ruff#subdirectory=crates/flake8_to_ruff"

[build-system]
requires = ["maturin>=1.0,<2.0"]
build-backend = "maturin"

[tool.maturin]
bindings = "bin"
strip = true
@ -1,13 +0,0 @@
//! Extract Black configuration settings from a pyproject.toml.

use ruff_linter::line_width::LineLength;
use ruff_linter::settings::types::PythonVersion;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Black {
    #[serde(alias = "line-length", alias = "line_length")]
    pub(crate) line_length: Option<LineLength>,
    #[serde(alias = "target-version", alias = "target_version")]
    pub(crate) target_version: Option<Vec<PythonVersion>>,
}
@ -1,687 +0,0 @@
|
||||||
use std::collections::{HashMap, HashSet};
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use itertools::Itertools;
|
|
||||||
|
|
||||||
use ruff_linter::line_width::LineLength;
|
|
||||||
use ruff_linter::registry::Linter;
|
|
||||||
use ruff_linter::rule_selector::RuleSelector;
|
|
||||||
use ruff_linter::rules::flake8_pytest_style::types::{
|
|
||||||
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
|
|
||||||
};
|
|
||||||
use ruff_linter::rules::flake8_quotes::settings::Quote;
|
|
||||||
use ruff_linter::rules::flake8_tidy_imports::settings::Strictness;
|
|
||||||
use ruff_linter::rules::pydocstyle::settings::Convention;
|
|
||||||
use ruff_linter::settings::types::PythonVersion;
|
|
||||||
use ruff_linter::settings::DEFAULT_SELECTORS;
|
|
||||||
use ruff_linter::warn_user;
|
|
||||||
use ruff_workspace::options::{
|
|
||||||
Flake8AnnotationsOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, Flake8ErrMsgOptions,
|
|
||||||
Flake8PytestStyleOptions, Flake8QuotesOptions, Flake8TidyImportsOptions, LintCommonOptions,
|
|
||||||
LintOptions, McCabeOptions, Options, Pep8NamingOptions, PydocstyleOptions,
|
|
||||||
};
|
|
||||||
use ruff_workspace::pyproject::Pyproject;
|
|
||||||
|
|
||||||
use super::external_config::ExternalConfig;
|
|
||||||
use super::plugin::Plugin;
|
|
||||||
use super::{parser, plugin};
|
|
||||||
|
|
||||||
pub(crate) fn convert(
|
|
||||||
config: &HashMap<String, HashMap<String, Option<String>>>,
|
|
||||||
external_config: &ExternalConfig,
|
|
||||||
plugins: Option<Vec<Plugin>>,
|
|
||||||
) -> Pyproject {
|
|
||||||
// Extract the Flake8 section.
|
|
||||||
let flake8 = config
|
|
||||||
.get("flake8")
|
|
||||||
.expect("Unable to find flake8 section in INI file");
|
|
||||||
|
|
||||||
// Extract all referenced rule code prefixes, to power plugin inference.
|
|
||||||
let mut referenced_codes: HashSet<RuleSelector> = HashSet::default();
|
|
||||||
for (key, value) in flake8 {
|
|
||||||
if let Some(value) = value {
|
|
||||||
match key.as_str() {
|
|
||||||
"select" | "ignore" | "extend-select" | "extend_select" | "extend-ignore"
|
|
||||||
| "extend_ignore" => {
|
|
||||||
referenced_codes.extend(parser::parse_prefix_codes(value.as_ref()));
|
|
||||||
}
|
|
||||||
"per-file-ignores" | "per_file_ignores" => {
|
|
||||||
if let Ok(per_file_ignores) =
|
|
||||||
parser::parse_files_to_codes_mapping(value.as_ref())
|
|
||||||
{
|
|
||||||
for (_, codes) in parser::collect_per_file_ignores(per_file_ignores) {
|
|
||||||
referenced_codes.extend(codes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Infer plugins, if not provided.
|
|
||||||
let plugins = plugins.unwrap_or_else(|| {
|
|
||||||
let from_options = plugin::infer_plugins_from_options(flake8);
|
|
||||||
if !from_options.is_empty() {
|
|
||||||
#[allow(clippy::print_stderr)]
|
|
||||||
{
|
|
||||||
eprintln!("Inferred plugins from settings: {from_options:#?}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
|
|
||||||
if !from_codes.is_empty() {
|
|
||||||
#[allow(clippy::print_stderr)]
|
|
||||||
{
|
|
||||||
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
from_options.into_iter().chain(from_codes).collect()
|
|
||||||
});
|
|
||||||
|
|
||||||
// Check if the user has specified a `select`. If not, we'll add our own
|
|
||||||
// default `select`, and populate it based on user plugins.
|
|
||||||
let mut select = flake8
|
|
||||||
.get("select")
|
|
||||||
.and_then(|value| {
|
|
||||||
value
|
|
||||||
.as_ref()
|
|
||||||
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
|
||||||
})
|
|
||||||
.unwrap_or_else(|| resolve_select(&plugins));
|
|
||||||
let mut ignore: HashSet<RuleSelector> = flake8
|
|
||||||
.get("ignore")
|
|
||||||
.and_then(|value| {
|
|
||||||
value
|
|
||||||
.as_ref()
|
|
||||||
.map(|value| HashSet::from_iter(parser::parse_prefix_codes(value)))
|
|
||||||
})
|
|
||||||
.unwrap_or_default();
|
|
||||||
|
|
||||||
// Parse each supported option.
|
|
||||||
let mut options = Options::default();
|
|
||||||
let mut lint_options = LintCommonOptions::default();
|
|
||||||
let mut flake8_annotations = Flake8AnnotationsOptions::default();
|
|
||||||
let mut flake8_bugbear = Flake8BugbearOptions::default();
|
|
||||||
let mut flake8_builtins = Flake8BuiltinsOptions::default();
|
|
||||||
let mut flake8_errmsg = Flake8ErrMsgOptions::default();
|
|
||||||
let mut flake8_pytest_style = Flake8PytestStyleOptions::default();
|
|
||||||
let mut flake8_quotes = Flake8QuotesOptions::default();
|
|
||||||
let mut flake8_tidy_imports = Flake8TidyImportsOptions::default();
|
|
||||||
let mut mccabe = McCabeOptions::default();
|
|
||||||
let mut pep8_naming = Pep8NamingOptions::default();
|
|
||||||
let mut pydocstyle = PydocstyleOptions::default();
|
|
||||||
for (key, value) in flake8 {
|
|
||||||
if let Some(value) = value {
|
|
||||||
match key.as_str() {
|
|
||||||
// flake8
|
|
||||||
"builtins" => {
|
|
||||||
options.builtins = Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"max-line-length" | "max_line_length" => match LineLength::from_str(value) {
|
|
||||||
Ok(line_length) => options.line_length = Some(line_length),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"select" => {
|
|
||||||
// No-op (handled above).
|
|
||||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
|
||||||
}
|
|
||||||
"ignore" => {
|
|
||||||
// No-op (handled above).
|
|
||||||
}
|
|
||||||
"extend-select" | "extend_select" => {
|
|
||||||
// Unlike Flake8, use a single explicit `select`.
|
|
||||||
select.extend(parser::parse_prefix_codes(value.as_ref()));
|
|
||||||
}
|
|
||||||
"extend-ignore" | "extend_ignore" => {
|
|
||||||
// Unlike Flake8, use a single explicit `ignore`.
|
|
||||||
ignore.extend(parser::parse_prefix_codes(value.as_ref()));
|
|
||||||
}
|
|
||||||
"exclude" => {
|
|
||||||
options.exclude = Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"extend-exclude" | "extend_exclude" => {
|
|
||||||
options.extend_exclude = Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"per-file-ignores" | "per_file_ignores" => {
|
|
||||||
match parser::parse_files_to_codes_mapping(value.as_ref()) {
|
|
||||||
Ok(per_file_ignores) => {
|
|
||||||
lint_options.per_file_ignores =
|
|
||||||
Some(parser::collect_per_file_ignores(per_file_ignores));
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// flake8-bugbear
|
|
||||||
"extend-immutable-calls" | "extend_immutable_calls" => {
|
|
||||||
flake8_bugbear.extend_immutable_calls =
|
|
||||||
Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
// flake8-builtins
|
|
||||||
"builtins-ignorelist" | "builtins_ignorelist" => {
|
|
||||||
flake8_builtins.builtins_ignorelist =
|
|
||||||
Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
// flake8-annotations
|
|
||||||
"suppress-none-returning" | "suppress_none_returning" => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"suppress-dummy-args" | "suppress_dummy_args" => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"mypy-init-return" | "mypy_init_return" => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"allow-star-arg-any" | "allow_star_arg_any" => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// flake8-quotes
|
|
||||||
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
|
|
||||||
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
|
|
||||||
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"multiline-quotes" | "multiline_quotes" => match value.trim() {
|
|
||||||
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
|
|
||||||
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"docstring-quotes" | "docstring_quotes" => match value.trim() {
|
|
||||||
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
|
|
||||||
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
// pep8-naming
|
|
||||||
"ignore-names" | "ignore_names" => {
|
|
||||||
pep8_naming.ignore_names = Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"classmethod-decorators" | "classmethod_decorators" => {
|
|
||||||
pep8_naming.classmethod_decorators =
|
|
||||||
Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"staticmethod-decorators" | "staticmethod_decorators" => {
|
|
||||||
pep8_naming.staticmethod_decorators =
|
|
||||||
Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
// flake8-tidy-imports
|
|
||||||
"ban-relative-imports" | "ban_relative_imports" => match value.trim() {
|
|
||||||
"true" => flake8_tidy_imports.ban_relative_imports = Some(Strictness::All),
|
|
||||||
"parents" => {
|
|
||||||
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
// flake8-docstrings
|
|
||||||
"docstring-convention" => match value.trim() {
|
|
||||||
"google" => pydocstyle.convention = Some(Convention::Google),
|
|
||||||
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
|
|
||||||
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
|
|
||||||
"all" => pydocstyle.convention = None,
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
// mccabe
|
|
||||||
"max-complexity" | "max_complexity" => match value.parse::<usize>() {
|
|
||||||
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
},
|
|
||||||
// flake8-errmsg
|
|
||||||
"errmsg-max-string-length" | "errmsg_max_string_length" => {
|
|
||||||
match value.parse::<usize>() {
|
|
||||||
Ok(max_string_length) => {
|
|
||||||
flake8_errmsg.max_string_length = Some(max_string_length);
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// flake8-pytest-style
|
|
||||||
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
|
|
||||||
match value.trim() {
|
|
||||||
"csv" => {
|
|
||||||
flake8_pytest_style.parametrize_names_type =
|
|
||||||
Some(ParametrizeNameType::Csv);
|
|
||||||
}
|
|
||||||
"tuple" => {
|
|
||||||
flake8_pytest_style.parametrize_names_type =
|
|
||||||
Some(ParametrizeNameType::Tuple);
|
|
||||||
}
|
|
||||||
"list" => {
|
|
||||||
flake8_pytest_style.parametrize_names_type =
|
|
||||||
Some(ParametrizeNameType::List);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
|
|
||||||
match value.trim() {
|
|
||||||
"tuple" => {
|
|
||||||
flake8_pytest_style.parametrize_values_type =
|
|
||||||
Some(ParametrizeValuesType::Tuple);
|
|
||||||
}
|
|
||||||
"list" => {
|
|
||||||
flake8_pytest_style.parametrize_values_type =
|
|
||||||
Some(ParametrizeValuesType::List);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
|
|
||||||
match value.trim() {
|
|
||||||
"tuple" => {
|
|
||||||
flake8_pytest_style.parametrize_values_row_type =
|
|
||||||
Some(ParametrizeValuesRowType::Tuple);
|
|
||||||
}
|
|
||||||
"list" => {
|
|
||||||
flake8_pytest_style.parametrize_values_row_type =
|
|
||||||
Some(ParametrizeValuesRowType::List);
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
warn_user!("Unexpected '{key}' value: {value}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
|
|
||||||
flake8_pytest_style.raises_require_match_for =
|
|
||||||
Some(parser::parse_strings(value.as_ref()));
|
|
||||||
}
|
|
||||||
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
|
|
||||||
match parser::parse_bool(value.as_ref()) {
|
|
||||||
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
|
|
||||||
Err(e) => {
|
|
||||||
warn_user!("Unable to parse '{key}' property: {e}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Unknown
|
|
||||||
_ => {
|
|
||||||
warn_user!("Skipping unsupported property: {}", key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Deduplicate and sort.
|
|
||||||
lint_options.select = Some(
|
|
||||||
select
|
|
||||||
.into_iter()
|
|
||||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
|
||||||
.collect(),
|
|
||||||
);
|
|
||||||
lint_options.ignore = Some(
|
|
||||||
ignore
|
|
||||||
.into_iter()
|
|
||||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
|
||||||
.collect(),
|
|
||||||
);
|
|
||||||
if flake8_annotations != Flake8AnnotationsOptions::default() {
|
|
||||||
lint_options.flake8_annotations = Some(flake8_annotations);
|
|
||||||
}
|
|
||||||
if flake8_bugbear != Flake8BugbearOptions::default() {
|
|
||||||
lint_options.flake8_bugbear = Some(flake8_bugbear);
|
|
||||||
}
|
|
||||||
if flake8_builtins != Flake8BuiltinsOptions::default() {
|
|
||||||
lint_options.flake8_builtins = Some(flake8_builtins);
|
|
||||||
}
|
|
||||||
if flake8_errmsg != Flake8ErrMsgOptions::default() {
|
|
||||||
lint_options.flake8_errmsg = Some(flake8_errmsg);
|
|
||||||
}
|
|
||||||
if flake8_pytest_style != Flake8PytestStyleOptions::default() {
|
|
||||||
lint_options.flake8_pytest_style = Some(flake8_pytest_style);
|
|
||||||
}
|
|
||||||
if flake8_quotes != Flake8QuotesOptions::default() {
|
|
||||||
lint_options.flake8_quotes = Some(flake8_quotes);
|
|
||||||
}
|
|
||||||
if flake8_tidy_imports != Flake8TidyImportsOptions::default() {
|
|
||||||
lint_options.flake8_tidy_imports = Some(flake8_tidy_imports);
|
|
||||||
}
|
|
||||||
if mccabe != McCabeOptions::default() {
|
|
||||||
lint_options.mccabe = Some(mccabe);
|
|
||||||
}
|
|
||||||
if pep8_naming != Pep8NamingOptions::default() {
|
|
||||||
lint_options.pep8_naming = Some(pep8_naming);
|
|
||||||
}
|
|
||||||
if pydocstyle != PydocstyleOptions::default() {
|
|
||||||
lint_options.pydocstyle = Some(pydocstyle);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract any settings from the existing `pyproject.toml`.
|
|
||||||
if let Some(black) = &external_config.black {
|
|
||||||
if let Some(line_length) = &black.line_length {
|
|
||||||
options.line_length = Some(*line_length);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(target_version) = &black.target_version {
|
|
||||||
if let Some(target_version) = target_version.iter().min() {
|
|
||||||
options.target_version = Some(*target_version);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(isort) = &external_config.isort {
|
|
||||||
if let Some(src_paths) = &isort.src_paths {
|
|
||||||
match options.src.as_mut() {
|
|
||||||
Some(src) => {
|
|
||||||
src.extend_from_slice(src_paths);
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
options.src = Some(src_paths.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(project) = &external_config.project {
|
|
||||||
if let Some(requires_python) = &project.requires_python {
|
|
||||||
if options.target_version.is_none() {
|
|
||||||
options.target_version =
|
|
||||||
PythonVersion::get_minimum_supported_version(requires_python);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if lint_options != LintCommonOptions::default() {
|
|
||||||
options.lint = Some(LintOptions {
|
|
||||||
common: lint_options,
|
|
||||||
..LintOptions::default()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create the pyproject.toml.
|
|
||||||
Pyproject::new(options)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Resolve the set of enabled `RuleSelector` values for the given
|
|
||||||
/// plugins.
|
|
||||||
fn resolve_select(plugins: &[Plugin]) -> HashSet<RuleSelector> {
|
|
||||||
let mut select: HashSet<_> = DEFAULT_SELECTORS.iter().cloned().collect();
|
|
||||||
select.extend(plugins.iter().map(|p| Linter::from(p).into()));
|
|
||||||
select
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use std::collections::HashMap;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use anyhow::Result;
|
|
||||||
use itertools::Itertools;
|
|
||||||
use pep440_rs::VersionSpecifiers;
|
|
||||||
|
|
||||||
use pretty_assertions::assert_eq;
|
|
||||||
use ruff_linter::line_width::LineLength;
|
|
||||||
use ruff_linter::registry::Linter;
|
|
||||||
use ruff_linter::rule_selector::RuleSelector;
|
|
||||||
use ruff_linter::rules::flake8_quotes;
|
|
||||||
use ruff_linter::rules::pydocstyle::settings::Convention;
|
|
||||||
use ruff_linter::settings::types::PythonVersion;
|
|
||||||
use ruff_workspace::options::{
|
|
||||||
Flake8QuotesOptions, LintCommonOptions, LintOptions, Options, PydocstyleOptions,
|
|
||||||
};
|
|
||||||
use ruff_workspace::pyproject::Pyproject;
|
|
||||||
|
|
||||||
use crate::converter::DEFAULT_SELECTORS;
|
|
||||||
use crate::pep621::Project;
|
|
||||||
use crate::ExternalConfig;
|
|
||||||
|
|
||||||
use super::super::plugin::Plugin;
|
|
||||||
use super::convert;
|
|
||||||
|
|
||||||
fn lint_default_options(plugins: impl IntoIterator<Item = RuleSelector>) -> LintCommonOptions {
|
|
||||||
LintCommonOptions {
|
|
||||||
ignore: Some(vec![]),
|
|
||||||
select: Some(
|
|
||||||
DEFAULT_SELECTORS
|
|
||||||
.iter()
|
|
||||||
.cloned()
|
|
||||||
.chain(plugins)
|
|
||||||
.sorted_by_key(RuleSelector::prefix_and_code)
|
|
||||||
.collect(),
|
|
||||||
),
|
|
||||||
..LintCommonOptions::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_empty() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([("flake8".to_string(), HashMap::default())]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
None,
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: lint_default_options([]),
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_dashes() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([("max-line-length".to_string(), Some("100".to_string()))]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
Some(vec![]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: lint_default_options([]),
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_underscores() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([("max_line_length".to_string(), Some("100".to_string()))]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
Some(vec![]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
line_length: Some(LineLength::try_from(100).unwrap()),
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: lint_default_options([]),
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_ignores_parse_errors() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([("max_line_length".to_string(), Some("abc".to_string()))]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
Some(vec![]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: lint_default_options([]),
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_plugin_options() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
Some(vec![]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: LintCommonOptions {
|
|
||||||
flake8_quotes: Some(Flake8QuotesOptions {
|
|
||||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
|
||||||
multiline_quotes: None,
|
|
||||||
docstring_quotes: None,
|
|
||||||
avoid_escape: None,
|
|
||||||
}),
|
|
||||||
..lint_default_options([])
|
|
||||||
},
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_docstring_conventions() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([(
|
|
||||||
"docstring-convention".to_string(),
|
|
||||||
Some("numpy".to_string()),
|
|
||||||
)]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
Some(vec![Plugin::Flake8Docstrings]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: LintCommonOptions {
|
|
||||||
pydocstyle: Some(PydocstyleOptions {
|
|
||||||
convention: Some(Convention::Numpy),
|
|
||||||
ignore_decorators: None,
|
|
||||||
property_decorators: None,
|
|
||||||
}),
|
|
||||||
..lint_default_options([Linter::Pydocstyle.into()])
|
|
||||||
},
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_infers_plugins_if_omitted() {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([(
|
|
||||||
"flake8".to_string(),
|
|
||||||
HashMap::from([("inline-quotes".to_string(), Some("single".to_string()))]),
|
|
||||||
)]),
|
|
||||||
&ExternalConfig::default(),
|
|
||||||
None,
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: LintCommonOptions {
|
|
||||||
flake8_quotes: Some(Flake8QuotesOptions {
|
|
||||||
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
|
|
||||||
multiline_quotes: None,
|
|
||||||
docstring_quotes: None,
|
|
||||||
avoid_escape: None,
|
|
||||||
}),
|
|
||||||
..lint_default_options([Linter::Flake8Quotes.into()])
|
|
||||||
},
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn it_converts_project_requires_python() -> Result<()> {
|
|
||||||
let actual = convert(
|
|
||||||
&HashMap::from([("flake8".to_string(), HashMap::default())]),
|
|
||||||
&ExternalConfig {
|
|
||||||
project: Some(&Project {
|
|
||||||
requires_python: Some(VersionSpecifiers::from_str(">=3.8.16, <3.11")?),
|
|
||||||
}),
|
|
||||||
..ExternalConfig::default()
|
|
||||||
},
|
|
||||||
Some(vec![]),
|
|
||||||
);
|
|
||||||
let expected = Pyproject::new(Options {
|
|
||||||
target_version: Some(PythonVersion::Py38),
|
|
||||||
lint: Some(LintOptions {
|
|
||||||
common: lint_default_options([]),
|
|
||||||
..LintOptions::default()
|
|
||||||
}),
|
|
||||||
..Options::default()
|
|
||||||
});
|
|
||||||
assert_eq!(actual, expected);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,10 +0,0 @@
use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Default)]
pub(crate) struct ExternalConfig<'a> {
    pub(crate) black: Option<&'a Black>,
    pub(crate) isort: Option<&'a Isort>,
    pub(crate) project: Option<&'a Project>,
}
@ -1,10 +0,0 @@
//! Extract isort configuration settings from a pyproject.toml.

use serde::{Deserialize, Serialize};

/// The [isort configuration](https://pycqa.github.io/isort/docs/configuration/config_files.html).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Isort {
    #[serde(alias = "src-paths", alias = "src_paths")]
    pub(crate) src_paths: Option<Vec<String>>,
}
@@ -1,80 +0,0 @@
//! Utility to generate Ruff's `pyproject.toml` section from a Flake8 INI file.

mod black;
mod converter;
mod external_config;
mod isort;
mod parser;
mod pep621;
mod plugin;
mod pyproject;

use std::path::PathBuf;

use anyhow::Result;
use clap::Parser;
use configparser::ini::Ini;

use crate::converter::convert;
use crate::external_config::ExternalConfig;
use crate::plugin::Plugin;
use crate::pyproject::parse;
use ruff_linter::logging::{set_up_logging, LogLevel};

#[derive(Parser)]
#[command(
    about = "Convert existing Flake8 configuration to Ruff.",
    long_about = None
)]
struct Args {
    /// Path to the Flake8 configuration file (e.g., `setup.cfg`, `tox.ini`, or
    /// `.flake8`).
    #[arg(required = true)]
    file: PathBuf,
    /// Optional path to a `pyproject.toml` file, used to ensure compatibility
    /// with Black.
    #[arg(long)]
    pyproject: Option<PathBuf>,
    /// List of plugins to enable.
    #[arg(long, value_delimiter = ',')]
    plugin: Option<Vec<Plugin>>,
}

fn main() -> Result<()> {
    set_up_logging(&LogLevel::Default)?;

    let args = Args::parse();

    // Read the INI file.
    let mut ini = Ini::new_cs();
    ini.set_multiline(true);
    let config = ini.load(args.file).map_err(|msg| anyhow::anyhow!(msg))?;

    // Read the pyproject.toml file.
    let pyproject = args.pyproject.map(parse).transpose()?;
    let external_config = pyproject
        .as_ref()
        .and_then(|pyproject| pyproject.tool.as_ref())
        .map(|tool| ExternalConfig {
            black: tool.black.as_ref(),
            isort: tool.isort.as_ref(),
            ..Default::default()
        })
        .unwrap_or_default();
    let external_config = ExternalConfig {
        project: pyproject
            .as_ref()
            .and_then(|pyproject| pyproject.project.as_ref()),
        ..external_config
    };

    // Create Ruff's pyproject.toml section.
    let pyproject = convert(&config, &external_config, args.plugin);

    #[allow(clippy::print_stdout)]
    {
        println!("{}", toml::to_string_pretty(&pyproject)?);
    }

    Ok(())
}

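A usage sketch for the binary above (file names are placeholders): `flake8-to-ruff setup.cfg --pyproject pyproject.toml --plugin flake8-bugbear,flake8-quotes` reads the Flake8 INI file, folds in Black and isort settings from the given `pyproject.toml`, and prints Ruff's `pyproject.toml` section to stdout. Assuming the `Args` definition above, the same invocation can be exercised in a test via clap's `parse_from`:

// Sketch only: exercises the clap derive above without touching the filesystem.
let args = Args::parse_from([
    "flake8-to-ruff",
    "setup.cfg",
    "--pyproject",
    "pyproject.toml",
    "--plugin",
    "flake8-bugbear,flake8-quotes",
]);
assert_eq!(args.file, PathBuf::from("setup.cfg"));
assert_eq!(args.pyproject, Some(PathBuf::from("pyproject.toml")));
// `value_delimiter = ','` splits the plugin list into two entries.
assert_eq!(args.plugin.as_ref().map(Vec::len), Some(2));
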
@@ -1,391 +0,0 @@
use std::str::FromStr;

use anyhow::{bail, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use rustc_hash::FxHashMap;

use ruff_linter::settings::types::PatternPrefixPair;
use ruff_linter::{warn_user, RuleSelector};

static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());

/// Parse a comma-separated list of `RuleSelector` values (e.g.,
/// "F401,E501").
pub(crate) fn parse_prefix_codes(value: &str) -> Vec<RuleSelector> {
    let mut codes: Vec<RuleSelector> = vec![];
    for code in COMMA_SEPARATED_LIST_RE.split(value) {
        let code = code.trim();
        if code.is_empty() {
            continue;
        }
        if let Ok(code) = RuleSelector::from_str(code) {
            codes.push(code);
        } else {
            warn_user!("Unsupported prefix code: {code}");
        }
    }
    codes
}

/// Parse a comma-separated list of strings (e.g., "__init__.py,__main__.py").
pub(crate) fn parse_strings(value: &str) -> Vec<String> {
    COMMA_SEPARATED_LIST_RE
        .split(value)
        .map(str::trim)
        .filter(|part| !part.is_empty())
        .map(String::from)
        .collect()
}

/// Parse a boolean.
pub(crate) fn parse_bool(value: &str) -> Result<bool> {
    match value.trim() {
        "true" => Ok(true),
        "false" => Ok(false),
        _ => bail!("Unexpected boolean value: {value}"),
    }
}

#[derive(Debug)]
struct Token {
    token_name: TokenType,
    src: String,
}

#[derive(Debug, Copy, Clone)]
enum TokenType {
    Code,
    File,
    Colon,
    Comma,
    Ws,
    Eof,
}

struct State {
    seen_sep: bool,
    seen_colon: bool,
    filenames: Vec<String>,
    codes: Vec<String>,
}

impl State {
    const fn new() -> Self {
        Self {
            seen_sep: true,
            seen_colon: false,
            filenames: vec![],
            codes: vec![],
        }
    }

    /// Generate the list of `StrRuleCodePair` pairs for the current
    /// state.
    fn parse(&self) -> Vec<PatternPrefixPair> {
        let mut codes: Vec<PatternPrefixPair> = vec![];
        for code in &self.codes {
            if let Ok(code) = RuleSelector::from_str(code) {
                for filename in &self.filenames {
                    codes.push(PatternPrefixPair {
                        pattern: filename.clone(),
                        prefix: code.clone(),
                    });
                }
            } else {
                warn_user!("Unsupported prefix code: {code}");
            }
        }
        codes
    }
}

/// Tokenize the raw 'files-to-codes' mapping.
fn tokenize_files_to_codes_mapping(value: &str) -> Vec<Token> {
    let mut tokens = vec![];
    let mut i = 0;
    while i < value.len() {
        for (token_re, token_name) in [
            (
                Regex::new(r"([A-Z]+[0-9]*)(?:$|\s|,)").unwrap(),
                TokenType::Code,
            ),
            (Regex::new(r"([^\s:,]+)").unwrap(), TokenType::File),
            (Regex::new(r"(\s*:\s*)").unwrap(), TokenType::Colon),
            (Regex::new(r"(\s*,\s*)").unwrap(), TokenType::Comma),
            (Regex::new(r"(\s+)").unwrap(), TokenType::Ws),
        ] {
            if let Some(cap) = token_re.captures(&value[i..]) {
                let mat = cap.get(1).unwrap();
                if mat.start() == 0 {
                    tokens.push(Token {
                        token_name,
                        src: mat.as_str().trim().to_string(),
                    });
                    i += mat.end();
                    break;
                }
            }
        }
    }
    tokens.push(Token {
        token_name: TokenType::Eof,
        src: String::new(),
    });
    tokens
}

/// Parse a 'files-to-codes' mapping, mimicking Flake8's internal logic.
/// See: <https://github.com/PyCQA/flake8/blob/7dfe99616fc2f07c0017df2ba5fa884158f3ea8a/src/flake8/utils.py#L45>
pub(crate) fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair>> {
    if value.trim().is_empty() {
        return Ok(vec![]);
    }
    let mut codes: Vec<PatternPrefixPair> = vec![];
    let mut state = State::new();
    for token in tokenize_files_to_codes_mapping(value) {
        if matches!(token.token_name, TokenType::Comma | TokenType::Ws) {
            state.seen_sep = true;
        } else if !state.seen_colon {
            if matches!(token.token_name, TokenType::Colon) {
                state.seen_colon = true;
                state.seen_sep = true;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        } else {
            if matches!(token.token_name, TokenType::Eof) {
                codes.extend(state.parse());
                state = State::new();
            } else if state.seen_sep && matches!(token.token_name, TokenType::Code) {
                state.codes.push(token.src);
                state.seen_sep = false;
            } else if state.seen_sep && matches!(token.token_name, TokenType::File) {
                codes.extend(state.parse());
                state = State::new();
                state.filenames.push(token.src);
                state.seen_sep = false;
            } else {
                bail!("Unexpected token: {:?}", token.token_name);
            }
        }
    }
    Ok(codes)
}

/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
pub(crate) fn collect_per_file_ignores(
    pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<RuleSelector>> {
    let mut per_file_ignores: FxHashMap<String, Vec<RuleSelector>> = FxHashMap::default();
    for pair in pairs {
        per_file_ignores
            .entry(pair.pattern)
            .or_default()
            .push(pair.prefix);
    }
    per_file_ignores
}

#[cfg(test)]
mod tests {
    use anyhow::Result;

    use ruff_linter::codes;
    use ruff_linter::registry::Linter;
    use ruff_linter::settings::types::PatternPrefixPair;
    use ruff_linter::RuleSelector;

    use super::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};

    #[test]
    fn it_parses_prefix_codes() {
        let actual = parse_prefix_codes("");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes(" ");
        let expected: Vec<RuleSelector> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,");
        let expected = vec![codes::Pyflakes::_401.into()];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401,E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);

        let actual = parse_prefix_codes("F401, E501");
        let expected = vec![
            codes::Pyflakes::_401.into(),
            codes::Pycodestyle::E501.into(),
        ];
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_parses_strings() {
        let actual = parse_strings("");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings(" ");
        let expected: Vec<String> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,");
        let expected = vec!["__init__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py,__main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);

        let actual = parse_strings("__init__.py, __main__.py");
        let expected = vec!["__init__.py".to_string(), "__main__.py".to_string()];
        assert_eq!(actual, expected);
    }

    #[test]
    fn it_parse_files_to_codes_mapping() -> Result<()> {
        let actual = parse_files_to_codes_mapping("")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        let actual = parse_files_to_codes_mapping(" ")?;
        let expected: Vec<PatternPrefixPair> = vec![];
        assert_eq!(actual, expected);

        // Ex) locust
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
locust/test/*: F841
examples/*: F841
*.pyi: E302,E704"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "locust/test/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: codes::Pyflakes::_841.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) celery
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
D,"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "t/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "setup.py".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "examples/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "docs/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
            PatternPrefixPair {
                pattern: "extra/*".to_string(),
                prefix: Linter::Pydocstyle.into(),
            },
        ];
        assert_eq!(actual, expected);

        // Ex) scrapy
        let actual = parse_files_to_codes_mapping(
            "per-file-ignores =
scrapy/__init__.py:E402
scrapy/core/downloader/handlers/http.py:F401
scrapy/http/__init__.py:F401
scrapy/linkextractors/__init__.py:E402,F401
scrapy/selector/__init__.py:F401
scrapy/spiders/__init__.py:E402,F401
scrapy/utils/url.py:F403,F405
tests/test_loader.py:E741"
                .strip_prefix("per-file-ignores =")
                .unwrap(),
        )?;
        let expected: Vec<PatternPrefixPair> = vec![
            PatternPrefixPair {
                pattern: "scrapy/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/http/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/linkextractors/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/selector/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pycodestyle::E402.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/spiders/__init__.py".to_string(),
                prefix: codes::Pyflakes::_401.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_403.into(),
            },
            PatternPrefixPair {
                pattern: "scrapy/utils/url.py".to_string(),
                prefix: codes::Pyflakes::_405.into(),
            },
            PatternPrefixPair {
                pattern: "tests/test_loader.py".to_string(),
                prefix: codes::Pycodestyle::E741.into(),
            },
        ];
        assert_eq!(actual, expected);

        Ok(())
    }
}

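`collect_per_file_ignores` simply groups the parsed `(pattern, code)` pairs by file pattern. A minimal, self-contained sketch of that grouping step with plain standard-library types (the real function uses `FxHashMap`, `PatternPrefixPair`, and `RuleSelector`):

use std::collections::HashMap;

// Sketch only: group per-file-ignore pairs by file pattern.
fn collect_per_file_ignores(pairs: Vec<(String, String)>) -> HashMap<String, Vec<String>> {
    let mut per_file_ignores: HashMap<String, Vec<String>> = HashMap::new();
    for (pattern, code) in pairs {
        per_file_ignores.entry(pattern).or_default().push(code);
    }
    per_file_ignores
}

fn main() {
    let pairs = vec![
        ("scrapy/utils/url.py".to_string(), "F403".to_string()),
        ("scrapy/utils/url.py".to_string(), "F405".to_string()),
        ("tests/test_loader.py".to_string(), "E741".to_string()),
    ];
    let grouped = collect_per_file_ignores(pairs);
    assert_eq!(grouped["scrapy/utils/url.py"], vec!["F403", "F405"]);
    assert_eq!(grouped["tests/test_loader.py"], vec!["E741"]);
}
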
@@ -1,10 +0,0 @@
//! Extract PEP 621 configuration settings from a pyproject.toml.

use pep440_rs::VersionSpecifiers;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub(crate) struct Project {
    #[serde(alias = "requires-python", alias = "requires_python")]
    pub(crate) requires_python: Option<VersionSpecifiers>,
}

@@ -1,368 +0,0 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::str::FromStr;

use anyhow::anyhow;
use ruff_linter::registry::Linter;
use ruff_linter::rule_selector::PreviewOptions;
use ruff_linter::RuleSelector;

#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
    Flake82020,
    Flake8Annotations,
    Flake8Bandit,
    Flake8BlindExcept,
    Flake8BooleanTrap,
    Flake8Bugbear,
    Flake8Builtins,
    Flake8Commas,
    Flake8Comprehensions,
    Flake8Datetimez,
    Flake8Debugger,
    Flake8Docstrings,
    Flake8Eradicate,
    Flake8ErrMsg,
    Flake8Executable,
    Flake8ImplicitStrConcat,
    Flake8ImportConventions,
    Flake8NoPep420,
    Flake8Pie,
    Flake8Print,
    Flake8PytestStyle,
    Flake8Quotes,
    Flake8Return,
    Flake8Simplify,
    Flake8TidyImports,
    Flake8TypeChecking,
    Flake8UnusedArguments,
    Flake8UsePathlib,
    McCabe,
    PEP8Naming,
    PandasVet,
    Pyupgrade,
    Tryceratops,
}

impl FromStr for Plugin {
    type Err = anyhow::Error;

    fn from_str(string: &str) -> Result<Self, Self::Err> {
        match string {
            "flake8-2020" => Ok(Plugin::Flake82020),
            "flake8-annotations" => Ok(Plugin::Flake8Annotations),
            "flake8-bandit" => Ok(Plugin::Flake8Bandit),
            "flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
            "flake8-boolean-trap" => Ok(Plugin::Flake8BooleanTrap),
            "flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
            "flake8-builtins" => Ok(Plugin::Flake8Builtins),
            "flake8-commas" => Ok(Plugin::Flake8Commas),
            "flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
            "flake8-datetimez" => Ok(Plugin::Flake8Datetimez),
            "flake8-debugger" => Ok(Plugin::Flake8Debugger),
            "flake8-docstrings" => Ok(Plugin::Flake8Docstrings),
            "flake8-eradicate" => Ok(Plugin::Flake8Eradicate),
            "flake8-errmsg" => Ok(Plugin::Flake8ErrMsg),
            "flake8-executable" => Ok(Plugin::Flake8Executable),
            "flake8-implicit-str-concat" => Ok(Plugin::Flake8ImplicitStrConcat),
            "flake8-import-conventions" => Ok(Plugin::Flake8ImportConventions),
            "flake8-no-pep420" => Ok(Plugin::Flake8NoPep420),
            "flake8-pie" => Ok(Plugin::Flake8Pie),
            "flake8-print" => Ok(Plugin::Flake8Print),
            "flake8-pytest-style" => Ok(Plugin::Flake8PytestStyle),
            "flake8-quotes" => Ok(Plugin::Flake8Quotes),
            "flake8-return" => Ok(Plugin::Flake8Return),
            "flake8-simplify" => Ok(Plugin::Flake8Simplify),
            "flake8-tidy-imports" => Ok(Plugin::Flake8TidyImports),
            "flake8-type-checking" => Ok(Plugin::Flake8TypeChecking),
            "flake8-unused-arguments" => Ok(Plugin::Flake8UnusedArguments),
            "flake8-use-pathlib" => Ok(Plugin::Flake8UsePathlib),
            "mccabe" => Ok(Plugin::McCabe),
            "pep8-naming" => Ok(Plugin::PEP8Naming),
            "pandas-vet" => Ok(Plugin::PandasVet),
            "pyupgrade" => Ok(Plugin::Pyupgrade),
            "tryceratops" => Ok(Plugin::Tryceratops),
            _ => Err(anyhow!("Unknown plugin: {string}")),
        }
    }
}

impl fmt::Debug for Plugin {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{}",
            match self {
                Plugin::Flake82020 => "flake8-2020",
                Plugin::Flake8Annotations => "flake8-annotations",
                Plugin::Flake8Bandit => "flake8-bandit",
                Plugin::Flake8BlindExcept => "flake8-blind-except",
                Plugin::Flake8BooleanTrap => "flake8-boolean-trap",
                Plugin::Flake8Bugbear => "flake8-bugbear",
                Plugin::Flake8Builtins => "flake8-builtins",
                Plugin::Flake8Commas => "flake8-commas",
                Plugin::Flake8Comprehensions => "flake8-comprehensions",
                Plugin::Flake8Datetimez => "flake8-datetimez",
                Plugin::Flake8Debugger => "flake8-debugger",
                Plugin::Flake8Docstrings => "flake8-docstrings",
                Plugin::Flake8Eradicate => "flake8-eradicate",
                Plugin::Flake8ErrMsg => "flake8-errmsg",
                Plugin::Flake8Executable => "flake8-executable",
                Plugin::Flake8ImplicitStrConcat => "flake8-implicit-str-concat",
                Plugin::Flake8ImportConventions => "flake8-import-conventions",
                Plugin::Flake8NoPep420 => "flake8-no-pep420",
                Plugin::Flake8Pie => "flake8-pie",
                Plugin::Flake8Print => "flake8-print",
                Plugin::Flake8PytestStyle => "flake8-pytest-style",
                Plugin::Flake8Quotes => "flake8-quotes",
                Plugin::Flake8Return => "flake8-return",
                Plugin::Flake8Simplify => "flake8-simplify",
                Plugin::Flake8TidyImports => "flake8-tidy-imports",
                Plugin::Flake8TypeChecking => "flake8-type-checking",
                Plugin::Flake8UnusedArguments => "flake8-unused-arguments",
                Plugin::Flake8UsePathlib => "flake8-use-pathlib",
                Plugin::McCabe => "mccabe",
                Plugin::PEP8Naming => "pep8-naming",
                Plugin::PandasVet => "pandas-vet",
                Plugin::Pyupgrade => "pyupgrade",
                Plugin::Tryceratops => "tryceratops",
            }
        )
    }
}

impl From<&Plugin> for Linter {
    fn from(plugin: &Plugin) -> Self {
        match plugin {
            Plugin::Flake82020 => Linter::Flake82020,
            Plugin::Flake8Annotations => Linter::Flake8Annotations,
            Plugin::Flake8Bandit => Linter::Flake8Bandit,
            Plugin::Flake8BlindExcept => Linter::Flake8BlindExcept,
            Plugin::Flake8BooleanTrap => Linter::Flake8BooleanTrap,
            Plugin::Flake8Bugbear => Linter::Flake8Bugbear,
            Plugin::Flake8Builtins => Linter::Flake8Builtins,
            Plugin::Flake8Commas => Linter::Flake8Commas,
            Plugin::Flake8Comprehensions => Linter::Flake8Comprehensions,
            Plugin::Flake8Datetimez => Linter::Flake8Datetimez,
            Plugin::Flake8Debugger => Linter::Flake8Debugger,
            Plugin::Flake8Docstrings => Linter::Pydocstyle,
            Plugin::Flake8Eradicate => Linter::Eradicate,
            Plugin::Flake8ErrMsg => Linter::Flake8ErrMsg,
            Plugin::Flake8Executable => Linter::Flake8Executable,
            Plugin::Flake8ImplicitStrConcat => Linter::Flake8ImplicitStrConcat,
            Plugin::Flake8ImportConventions => Linter::Flake8ImportConventions,
            Plugin::Flake8NoPep420 => Linter::Flake8NoPep420,
            Plugin::Flake8Pie => Linter::Flake8Pie,
            Plugin::Flake8Print => Linter::Flake8Print,
            Plugin::Flake8PytestStyle => Linter::Flake8PytestStyle,
            Plugin::Flake8Quotes => Linter::Flake8Quotes,
            Plugin::Flake8Return => Linter::Flake8Return,
            Plugin::Flake8Simplify => Linter::Flake8Simplify,
            Plugin::Flake8TidyImports => Linter::Flake8TidyImports,
            Plugin::Flake8TypeChecking => Linter::Flake8TypeChecking,
            Plugin::Flake8UnusedArguments => Linter::Flake8UnusedArguments,
            Plugin::Flake8UsePathlib => Linter::Flake8UsePathlib,
            Plugin::McCabe => Linter::McCabe,
            Plugin::PEP8Naming => Linter::PEP8Naming,
            Plugin::PandasVet => Linter::PandasVet,
            Plugin::Pyupgrade => Linter::Pyupgrade,
            Plugin::Tryceratops => Linter::Tryceratops,
        }
    }
}

/// Infer the enabled plugins based on user-provided options.
///
/// For example, if the user specified a `mypy-init-return` setting, we should
/// infer that `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> Vec<Plugin> {
    let mut plugins = BTreeSet::new();
    for key in flake8.keys() {
        match key.as_str() {
            // flake8-annotations
            "suppress-none-returning" | "suppress_none_returning" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "suppress-dummy-args" | "suppress_dummy_args" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-defs" | "allow_untyped_defs" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-untyped-nested" | "allow_untyped_nested" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "mypy-init-return" | "mypy_init_return" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "dispatch-decorators" | "dispatch_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "overload-decorators" | "overload_decorators" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            "allow-star-arg-any" | "allow_star_arg_any" => {
                plugins.insert(Plugin::Flake8Annotations);
            }
            // flake8-bugbear
            "extend-immutable-calls" | "extend_immutable_calls" => {
                plugins.insert(Plugin::Flake8Bugbear);
            }
            // flake8-builtins
            "builtins-ignorelist" | "builtins_ignorelist" => {
                plugins.insert(Plugin::Flake8Builtins);
            }
            // flake8-docstrings
            "docstring-convention" | "docstring_convention" => {
                plugins.insert(Plugin::Flake8Docstrings);
            }
            // flake8-eradicate
            "eradicate-aggressive" | "eradicate_aggressive" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist" | "eradicate_whitelist" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            "eradicate-whitelist-extend" | "eradicate_whitelist_extend" => {
                plugins.insert(Plugin::Flake8Eradicate);
            }
            // flake8-pytest-style
            "pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            "pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
                plugins.insert(Plugin::Flake8PytestStyle);
            }
            // flake8-quotes
            "quotes" | "inline-quotes" | "inline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "multiline-quotes" | "multiline_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "docstring-quotes" | "docstring_quotes" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            "avoid-escape" | "avoid_escape" => {
                plugins.insert(Plugin::Flake8Quotes);
            }
            // flake8-tidy-imports
            "ban-relative-imports" | "ban_relative_imports" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            "banned-modules" | "banned_modules" => {
                plugins.insert(Plugin::Flake8TidyImports);
            }
            // mccabe
            "max-complexity" | "max_complexity" => {
                plugins.insert(Plugin::McCabe);
            }
            // pep8-naming
            "ignore-names" | "ignore_names" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "classmethod-decorators" | "classmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "staticmethod-decorators" | "staticmethod_decorators" => {
                plugins.insert(Plugin::PEP8Naming);
            }
            "max-string-length" | "max_string_length" => {
                plugins.insert(Plugin::Flake8ErrMsg);
            }
            _ => {}
        }
    }
    Vec::from_iter(plugins)
}

/// Infer the enabled plugins based on the referenced prefixes.
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub(crate) fn infer_plugins_from_codes(selectors: &HashSet<RuleSelector>) -> Vec<Plugin> {
    // Ignore cases in which we've knowingly changed rule prefixes.
    [
        Plugin::Flake82020,
        Plugin::Flake8Annotations,
        Plugin::Flake8Bandit,
        // Plugin::Flake8BlindExcept,
        Plugin::Flake8BooleanTrap,
        Plugin::Flake8Bugbear,
        Plugin::Flake8Builtins,
        // Plugin::Flake8Commas,
        Plugin::Flake8Comprehensions,
        Plugin::Flake8Datetimez,
        Plugin::Flake8Debugger,
        Plugin::Flake8Docstrings,
        // Plugin::Flake8Eradicate,
        Plugin::Flake8ErrMsg,
        Plugin::Flake8Executable,
        Plugin::Flake8ImplicitStrConcat,
        // Plugin::Flake8ImportConventions,
        Plugin::Flake8NoPep420,
        Plugin::Flake8Pie,
        Plugin::Flake8Print,
        Plugin::Flake8PytestStyle,
        Plugin::Flake8Quotes,
        Plugin::Flake8Return,
        Plugin::Flake8Simplify,
        // Plugin::Flake8TidyImports,
        // Plugin::Flake8TypeChecking,
        Plugin::Flake8UnusedArguments,
        // Plugin::Flake8UsePathlib,
        Plugin::McCabe,
        Plugin::PEP8Naming,
        Plugin::PandasVet,
        Plugin::Tryceratops,
    ]
    .into_iter()
    .filter(|plugin| {
        for selector in selectors {
            if selector
                .rules(&PreviewOptions::default())
                .any(|rule| Linter::from(plugin).rules().any(|r| r == rule))
            {
                return true;
            }
        }
        false
    })
    .collect()
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use super::{infer_plugins_from_options, Plugin};

    #[test]
    fn it_infers_plugins() {
        let actual = infer_plugins_from_options(&HashMap::from([(
            "inline-quotes".to_string(),
            Some("single".to_string()),
        )]));
        let expected = vec![Plugin::Flake8Quotes];
        assert_eq!(actual, expected);

        let actual = infer_plugins_from_options(&HashMap::from([(
            "staticmethod-decorators".to_string(),
            Some("[]".to_string()),
        )]));
        let expected = vec![Plugin::PEP8Naming];
        assert_eq!(actual, expected);
    }
}

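`infer_plugins_from_options` works by matching flake8 option keys, accepting both the dashed and underscored spellings found in real configs. A minimal, self-contained sketch of that idea (the real code matches each spelling explicitly and returns `Plugin` values rather than plugin-name strings):

// Sketch only: map a flake8 option key to the plugin it implies.
fn plugin_for_option_key(key: &str) -> Option<&'static str> {
    match key.replace('_', "-").as_str() {
        "max-complexity" => Some("mccabe"),
        "docstring-convention" => Some("flake8-docstrings"),
        "inline-quotes" | "quotes" => Some("flake8-quotes"),
        "extend-immutable-calls" => Some("flake8-bugbear"),
        _ => None,
    }
}

fn main() {
    assert_eq!(plugin_for_option_key("max_complexity"), Some("mccabe"));
    assert_eq!(plugin_for_option_key("inline-quotes"), Some("flake8-quotes"));
    assert_eq!(plugin_for_option_key("unknown-option"), None);
}
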
@@ -1,26 +0,0 @@
use std::path::Path;

use anyhow::Result;
use serde::{Deserialize, Serialize};

use super::black::Black;
use super::isort::Isort;
use super::pep621::Project;

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Tools {
    pub(crate) black: Option<Black>,
    pub(crate) isort: Option<Isort>,
}

#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) struct Pyproject {
    pub(crate) tool: Option<Tools>,
    pub(crate) project: Option<Project>,
}

pub(crate) fn parse<P: AsRef<Path>>(path: P) -> Result<Pyproject> {
    let contents = std::fs::read_to_string(path)?;
    let pyproject = toml::from_str::<Pyproject>(&contents)?;
    Ok(pyproject)
}

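For reference, `parse` expects a `pyproject.toml` with an optional `[project]` table (for `requires-python`) and optional `[tool.black]`/`[tool.isort]` tables. A minimal sketch with simplified stand-in types rather than the crate's `Black`/`Isort`/`Project` structs (the TOML keys shown are illustrative), assuming the `serde` and `toml` crates:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Project {
    #[serde(alias = "requires-python", alias = "requires_python")]
    requires_python: Option<String>,
}

#[derive(Debug, Deserialize)]
struct Tools {
    // Stand-ins: keep the raw tables instead of typed Black/Isort structs.
    black: Option<toml::Value>,
    isort: Option<toml::Value>,
}

#[derive(Debug, Deserialize)]
struct Pyproject {
    tool: Option<Tools>,
    project: Option<Project>,
}

fn main() {
    let contents = r#"
[project]
requires-python = ">=3.8"

[tool.black]
line-length = 88

[tool.isort]
src_paths = ["src"]
"#;
    let pyproject: Pyproject = toml::from_str(contents).unwrap();
    assert_eq!(
        pyproject.project.unwrap().requires_python.as_deref(),
        Some(">=3.8")
    );
    assert!(pyproject.tool.unwrap().black.is_some());
}
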
@@ -93,7 +93,6 @@ changelog_contributors = false
 version_files = [
     "README.md",
     "docs/integrations.md",
-    "crates/flake8_to_ruff/Cargo.toml",
     "crates/ruff_cli/Cargo.toml",
     "crates/ruff_linter/Cargo.toml",
     "crates/ruff_shrinking/Cargo.toml",