diff --git a/.editorconfig b/.editorconfig index 9f02a19e..c4f3c65d 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,7 +1,7 @@ root = true [*.{py,pyi,rs,toml,md}] -charset = utf-8 +charset = "utf-8" end_of_line = lf indent_size = 4 indent_style = space diff --git a/.flake8 b/.flake8 index eb0260b0..cf63afc5 100644 --- a/.flake8 +++ b/.flake8 @@ -1,126 +1,69 @@ [flake8] ignore = - # unnecessary list comprehension; A generator only better than a list - # comprehension if we don't always need to iterate through all items in - # the generator (based on the use case). - C407, + C407, # unnecessary list comprehension; A generator only better than a list + # comprehension if we don't always need to iterate through all items in + # the generator (based on the use case). + # The following codes belong to pycodestyle, and overlap with black: - # indentation contains mixed spaces and tabs - E101, - # indentation is not a multiple of four - E111, - # expected an indented block - E112, - # unexpected indentation - E113, - # indentation is not a multiple of four (comment) - E114, - # expected an indented block (comment) - E115, - # unexpected indentation (comment) - E116, - # continuation line under-indented for hanging indent - E121, - # continuation line missing indentation or outdented - E122, - # closing bracket does not match indentation of opening bracket’s line - E123, - # closing bracket does not match visual indentation - E124, - # continuation line with same indent as next logical line - E125, - # continuation line over-indented for hanging indent - E126, - # continuation line over-indented for visual indent; is harmless - # (over-indent is visually unambiguous) and currently generates too - # many warnings for existing code. - E127, - - # continuation line under-indented for visual indent - E128, - # visually indented line with same indent as next logical line - E129, - # continuation line unaligned for hanging indent - E131, - # closing bracket is missing indentation - E133, - # whitespace after ‘(‘ - E201, - # whitespace before ‘)’ - E202, - # whitespace before ‘:’; this warning is invalid for slices - E203, - # whitespace before ‘(‘ - E211, - # multiple spaces before operator - E221, - # multiple spaces after operator - E222, - # tab before operator - E223, - # tab after operator - E224, - # missing whitespace around operator - E225, - # missing whitespace around arithmetic operator - E226, - # missing whitespace around bitwise or shift operator - E227, - # missing whitespace around modulo operator - E228, - # missing whitespace after ‘,’, ‘;’, or ‘:’ - E231, - # multiple spaces after ‘,’ - E241, - # tab after ‘,’ - E242, - # unexpected spaces around keyword / parameter equals - E251, - # at least two spaces before inline comment - E261, - # inline comment should start with ‘# ‘ - E262, - # block comment should start with ‘# ‘ - E265, - # too many leading ‘#’ for block comment - E266, - # multiple spaces after keyword - E271, - # multiple spaces before keyword - E272, - # tab after keyword - E273, - # tab before keyword - E274, - # missing whitespace after keyword - E275, - # expected 1 blank line, found 0 - E301, - # expected 2 blank lines, found 0 - E302, - # too many blank lines (3) - E303, - # blank lines found after function decorator - E304, - # expected 2 blank lines after end of function or class - E305, - # expected 1 blank line before a nested definition - E306, - # multiple imports on one line - E401, - # line too long (> 79 characters) - E501, - # the backslash is redundant 
between brackets - E502, - # multiple statements on one line (colon) - E701, - # multiple statements on one line (semicolon) - E702, - # statement ends with a semicolon - E703, - # multiple statements on one line (def) - E704, + E101, # indentation contains mixed spaces and tabs + E111, # indentation is not a multiple of four + E112, # expected an indented block + E113, # unexpected indentation + E114, # indentation is not a multiple of four (comment) + E115, # expected an indented block (comment) + E116, # unexpected indentation (comment) + E121, # continuation line under-indented for hanging indent + E122, # continuation line missing indentation or outdented + E123, # closing bracket does not match indentation of opening bracket’s line + E124, # closing bracket does not match visual indentation + E125, # continuation line with same indent as next logical line + E126, # continuation line over-indented for hanging indent + E127, # continuation line over-indented for visual indent; is harmless + # (over-indent is visually unambiguous) and currently generates too + # many warnings for existing code. + E128, # continuation line under-indented for visual indent + E129, # visually indented line with same indent as next logical line + E131, # continuation line unaligned for hanging indent + E133, # closing bracket is missing indentation + E201, # whitespace after ‘(‘ + E202, # whitespace before ‘)’ + E203, # whitespace before ‘:’; this warning is invalid for slices + E211, # whitespace before ‘(‘ + E221, # multiple spaces before operator + E222, # multiple spaces after operator + E223, # tab before operator + E224, # tab after operator + E225, # missing whitespace around operator + E226, # missing whitespace around arithmetic operator + E227, # missing whitespace around bitwise or shift operator + E228, # missing whitespace around modulo operator + E231, # missing whitespace after ‘,’, ‘;’, or ‘:’ + E241, # multiple spaces after ‘,’ + E242, # tab after ‘,’ + E251, # unexpected spaces around keyword / parameter equals + E261, # at least two spaces before inline comment + E262, # inline comment should start with ‘# ‘ + E265, # block comment should start with ‘# ‘ + E266, # too many leading ‘#’ for block comment + E271, # multiple spaces after keyword + E272, # multiple spaces before keyword + E273, # tab after keyword + E274, # tab before keyword + E275, # missing whitespace after keyword + E301, # expected 1 blank line, found 0 + E302, # expected 2 blank lines, found 0 + E303, # too many blank lines (3) + E304, # blank lines found after function decorator + E305, # expected 2 blank lines after end of function or class + E306, # expected 1 blank line before a nested definition + E401, # multiple imports on one line + E501, # line too long (> 79 characters) + E502, # the backslash is redundant between brackets + E701, # multiple statements on one line (colon) + E702, # multiple statements on one line (semicolon) + E703, # statement ends with a semicolon + E704, # multiple statements on one line (def) # These are pycodestyle lints that black doesn't catch: # E711, # comparison to None should be ‘if cond is None:’ # E712, # comparison to True should be ‘if cond is True:’ or ‘if cond:’ @@ -135,25 +78,16 @@ ignore = # I think these are internal to pycodestyle? 
# E901, # SyntaxError or IndentationError # E902, # IOError - # isn't aware of type-only imports, results in false-positives - F811, - # indentation contains tabs - W191, - # trailing whitespace - W291, - # no newline at end of file - W292, - # blank line contains whitespace - W293, - # blank line at end of file - W391, - # line break before binary operator; binary operator in a new line is - # the standard - W503, - # line break after binary operator - W504, - # not part of PEP8; doc line too long (> 79 characters) - W505, + F811, # isn't aware of type-only imports, results in false-positives + W191, # indentation contains tabs + W291, # trailing whitespace + W292, # no newline at end of file + W293, # blank line contains whitespace + W391, # blank line at end of file + W503, # line break before binary operator; binary operator in a new line is + # the standard + W504, # line break after binary operator + W505, # not part of PEP8; doc line too long (> 79 characters) # These are pycodestyle lints that black doesn't catch: # W601, # .has_key() is deprecated, use ‘in’ # W602, # deprecated form of raising exception diff --git a/.github/build-matrix.json b/.github/build-matrix.json deleted file mode 100644 index 3a1db7b3..00000000 --- a/.github/build-matrix.json +++ /dev/null @@ -1,31 +0,0 @@ -[ - { - "vers": "x86_64", - "os": "ubuntu-20.04" - }, - { - "vers": "i686", - "os": "ubuntu-20.04" - }, - { - "vers": "arm64", - "os": "macos-latest" - }, - { - "vers": "auto64", - "os": "macos-latest" - }, - { - "vers": "auto64", - "os": "windows-2019" - }, - { - "vers": "aarch64", - "os": [ - "self-hosted", - "linux", - "ARM64" - ], - "on_ref_regex": "^refs/(heads/main|tags/.*)$" - } -] \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index 40738c8d..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,18 +0,0 @@ -# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file - -version: 2 -updates: - - package-ecosystem: pip - directory: "/" - schedule: - interval: weekly - - - package-ecosystem: cargo - directory: "/native" - schedule: - interval: weekly - - - package-ecosystem: github-actions - directory: "/" - schedule: - interval: weekly diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0df65636..68359560 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,45 +1,283 @@ -name: build +name: Python CI + on: - workflow_call: + push: + branches: + - main + pull_request: jobs: - # Build python wheels - build: - name: Build wheels on ${{ matrix.os }} +# Run unittests + test: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - os: - [ - macos-latest, - ubuntu-latest, - ubuntu-24.04-arm, - windows-latest, - windows-11-arm, - ] + os: [ubuntu-latest, macos-latest, windows-latest] + python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] + parser: [pure, native] + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - if: ${{ matrix.parser == 'native' }} + uses: 
actions-rs/toolchain@v1 + with: + toolchain: stable + - run: >- + echo LIBCST_PARSER_TYPE=${{ matrix.parser }} >> $GITHUB_ENV + - name: Run Tests + run: python setup.py test + +# Run linters + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - run: flake8 + - run: ufmt check . + - run: python3 -m fixit.cli.run_rules + +# Run pyre typechecker + typecheck: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Make sure Pyre uses the working copy + run: pip install -e . + - run: pyre --version + - run: pyre -n check + - run: python libcst/tests/test_pyre_integration.py + - run: git diff --exit-code + +# Upload test coverage + coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Generate Coverage + run: | + coverage run setup.py test + coverage xml -i + - uses: codecov/codecov-action@v2 + with: + files: coverage.xml + fail_ci_if_error: true + verbose: true + - name: Archive Coverage + uses: actions/upload-artifact@v2 + with: + name: coverage + path: coverage.xml + +# Build the docs + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - uses: ts-graphviz/setup-graphviz@v1 + - run: sphinx-build docs/source/ docs/build/ + - name: Archive Docs + uses: actions/upload-artifact@v2 + with: + name: sphinx-docs + path: docs/build + +# Build python package + build: + name: Build wheels on ${{ matrix.os }}/${{ matrix.vers }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - vers: i686 + os: ubuntu-20.04 + # aarch64 seems to be stuck + # - vers: aarch64 + # os: ubuntu-20.04 + - vers: auto64 + os: ubuntu-20.04 + - vers: arm64 + os: macos-10.15 + - vers: auto64 + os: macos-10.15 + - vers: auto64 + os: windows-2019 env: SCCACHE_VERSION: 0.2.13 - 
GITHUB_WORKSPACE: "${{github.workspace}}" + CIBW_BEFORE_ALL_LINUX: "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y" + CIBW_BEFORE_ALL_MACOS: "rustup target add aarch64-apple-darwin x86_64-apple-darwin" + CIBW_BEFORE_ALL_WINDOWS: "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc" + CIBW_ENVIRONMENT: 'PATH="$PATH:$HOME/.cargo/bin" LIBCST_NO_LOCAL_SCHEME=$LIBCST_NO_LOCAL_SCHEME' + CIBW_SKIP: "cp27-* cp34-* cp35-* pp* *-win32 *-win_arm64 *-musllinux_*" + CIBW_ARCHS: ${{ matrix.vers }} + CIBW_BUILD_VERBOSITY: 1 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v1 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt + - name: Disable scmtools local scheme + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + run: >- + echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV + - name: Build wheels + uses: pypa/cibuildwheel@v2.3.1 + - uses: actions/upload-artifact@v2 + with: + path: wheelhouse/*.whl + name: wheels + + pypi: + if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} + name: Upload wheels to pypi + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/checkout@v1 + - name: Download binary wheels + id: download + uses: actions/download-artifact@v2 with: - fetch-depth: 0 - persist-credentials: false - - uses: actions/setup-python@v6 + name: wheels + path: wheelhouse + - uses: actions/setup-python@v2 with: - python-version: "3.12" - - uses: dtolnay/rust-toolchain@stable + python-version: "3.10" + - uses: actions/cache@v2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements.txt', 'requirements-dev.txt', 'setup.py') }} + - name: Install Dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install --upgrade --upgrade-strategy eager build -r requirements.txt -r requirements-dev.txt - name: Disable scmtools local scheme - if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} run: >- echo LIBCST_NO_LOCAL_SCHEME=1 >> $GITHUB_ENV - - name: Enable building wheels for pre-release CPython versions - if: github.event_name != 'release' - run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV - - name: Build wheels - uses: pypa/cibuildwheel@v3.2.1 - - uses: actions/upload-artifact@v4 + - name: Build a source tarball + run: >- + python -m + build + --sdist + --outdir ${{ steps.download.outputs.download-path }} + - name: Publish distribution 📦 to Test PyPI + uses: pypa/gh-action-pypi-publish@release/v1 with: - path: wheelhouse/*.whl - name: wheels-${{matrix.os}} + user: __token__ + password: ${{ secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + packages_dir: ${{ steps.download.outputs.download-path }} + +# Test rust parts + native: + name: Rust unit tests + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + toolchain: stable + components: rustfmt, clippy + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: test + uses: 
actions-rs/cargo@v1 + with: + command: test + args: --manifest-path=native/Cargo.toml --release + - name: clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --manifest-path=native/Cargo.toml --all-features + + rustfmt: + name: Rustfmt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + - run: rustup component add rustfmt + - uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all --manifest-path=native/Cargo.toml -- --check diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index dd3665ad..00000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,142 +0,0 @@ -name: CI - -on: - push: - branches: - - main - pull_request: - -permissions: {} - -jobs: - test: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [macos-latest, ubuntu-latest, windows-latest] - python-version: - - "3.9" - - "3.10" - - "3.11" - - "3.12" - - "3.13" - - "3.13t" - - "3.14" - - "3.14t" - steps: - - name: Install uv - uses: astral-sh/setup-uv@v7 - with: - version: "0.7.13" - python-version: ${{ matrix.python-version }} - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - uses: dtolnay/rust-toolchain@stable - - name: Build LibCST - run: uv sync --locked --dev - - name: Native Parser Tests - run: uv run poe test - - name: Coverage - run: uv run coverage report - - # Run linters - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - name: Install uv - uses: astral-sh/setup-uv@v7 - with: - version: "0.7.13" - python-version: "3.10" - - run: uv run poe lint - - run: uv run poe fixtures - - # Run pyre typechecker - typecheck: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - name: Install uv - uses: astral-sh/setup-uv@v7 - with: - version: "0.7.13" - python-version: "3.10" - - run: uv run poe typecheck - - # Build the docs - docs: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - name: Install uv - uses: astral-sh/setup-uv@v7 - with: - version: "0.7.13" - python-version: "3.10" - - uses: ts-graphviz/setup-graphviz@v2 - - run: uv run --group docs poe docs - - name: Archive Docs - uses: actions/upload-artifact@v4 - with: - name: sphinx-docs - path: docs/build - - # Test rust parts - native: - name: Rust unit tests - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ["3.10", "3.13t"] - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - uses: dtolnay/rust-toolchain@stable - with: - components: rustfmt, clippy - - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python-version }} - - name: test - run: cargo test --manifest-path=native/Cargo.toml --release - - name: test without python - if: matrix.os == 'ubuntu-latest' - run: cargo test --manifest-path=native/Cargo.toml --release --no-default-features - - name: clippy - run: cargo clippy --manifest-path=native/Cargo.toml --all-targets --all-features - - name: compile-benchmarks - run: cargo bench --manifest-path=native/Cargo.toml --no-run - - rustfmt: - name: Rustfmt - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - uses: 
dtolnay/rust-toolchain@stable - with: - components: rustfmt - - run: rustup component add rustfmt - - name: format - run: cargo fmt --all --manifest-path=native/Cargo.toml -- --check - build: - # only trigger here for pull requests - regular pushes are handled in pypi_upload - if: ${{ github.event_name == 'pull_request' }} - uses: Instagram/LibCST/.github/workflows/build.yml@main diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml deleted file mode 100644 index 04434a24..00000000 --- a/.github/workflows/pypi_upload.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: pypi_upload - -on: - release: - types: [published] - push: - branches: [main] - -permissions: - contents: read - -jobs: - build: - uses: Instagram/LibCST/.github/workflows/build.yml@main - upload_release: - name: Upload wheels to pypi - runs-on: ubuntu-latest - needs: build - permissions: - id-token: write - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - persist-credentials: false - - name: Download binary wheels - id: download - uses: actions/download-artifact@v5 - with: - pattern: wheels-* - path: wheelhouse - merge-multiple: true - - uses: actions/setup-python@v6 - with: - python-version: "3.10" - - name: Install uv - uses: astral-sh/setup-uv@v7 - with: - version: "0.7.13" - enable-cache: false - - name: Build a source tarball - env: - LIBCST_NO_LOCAL_SCHEME: 1 - OUTDIR: ${{ steps.download.outputs.download-path }} - run: >- - uv run python -m - build - --sdist - --outdir "$OUTDIR" - - name: Publish distribution 📦 to Test PyPI - if: github.event_name == 'push' - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - packages-dir: ${{ steps.download.outputs.download-path }} - - name: Publish distribution 📦 to PyPI - if: github.event_name == 'release' - uses: pypa/gh-action-pypi-publish@release/v1 - with: - packages-dir: ${{ steps.download.outputs.download-path }} diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml deleted file mode 100644 index 47fdfe00..00000000 --- a/.github/workflows/zizmor.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: GitHub Actions Security Analysis with zizmor 🌈 - -on: - push: - branches: ["main"] - pull_request: - branches: ["**"] - -jobs: - zizmor: - name: zizmor latest via PyPI - runs-on: ubuntu-latest - permissions: - security-events: write - contents: read - actions: read - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Install the latest version of uv - uses: astral-sh/setup-uv@v7 - - - name: Run zizmor 🌈 - run: uvx zizmor --format sarif . > results.sarif - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v4 - with: - sarif_file: results.sarif - category: zizmor \ No newline at end of file diff --git a/.gitignore b/.gitignore index 004ebb4c..9bb9370a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ *.swp *.swo *.pyc -*.pyd *.pyo *.so *.egg-info/ @@ -18,6 +17,3 @@ libcst/_version.py .hypothesis/ .python-version target/ -venv/ -.venv/ -.idea/ diff --git a/.pyre_configuration b/.pyre_configuration index cf108076..ae37b031 100644 --- a/.pyre_configuration +++ b/.pyre_configuration @@ -2,9 +2,6 @@ "exclude": [ ".*\/native\/.*" ], - "ignore_all_errors": [ - ".venv" - ], "source_directories": [ "." 
], diff --git a/.readthedocs.yml b/.readthedocs.yml index bb6eb608..c76ca987 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,18 +5,12 @@ sphinx: formats: all -build: - os: ubuntu-20.04 - tools: - python: "3" - rust: "1.70" - apt_packages: - - graphviz - python: + version: 3.7 install: + - requirements: requirements.txt + - requirements: requirements-dev.txt - method: pip path: . - extra_requirements: - - dev + system_packages: true diff --git a/CHANGELOG.md b/CHANGELOG.md index f72d53f0..a54d67cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,491 +1,3 @@ -# 1.8.6 - 2025-11-03 - -## What's Changed -* Update pyproject.toml for 3.14t by @itamaro in https://github.com/Instagram/LibCST/pull/1417 -* Update PyO3 to 0.26 by @cjwatson in https://github.com/Instagram/LibCST/pull/1413 -* Make CodemodCommand's supported_transforms order deterministic by @frvnkliu in https://github.com/Instagram/LibCST/pull/1424 - -## New Contributors -* @cjwatson made their first contribution in https://github.com/Instagram/LibCST/pull/1413 -* @frvnkliu made their first contribution in https://github.com/Instagram/LibCST/pull/1424 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.5...v1.8.6 - -# 1.8.5 - 2025-09-25 - -## What's Changed -* fixed: circular import error by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1406 - - -# 1.8.4 - 2025-09-09 - -## What's Changed -* fixed: generate Attribute nodes when applying type annotations by @tungol in https://github.com/Instagram/LibCST/pull/1396 -* added: Support parsing of t-strings #1374 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1398 -* added: add support for PEP758 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1401 - -## New Contributors -* @tungol made their first contribution in https://github.com/Instagram/LibCST/pull/1396 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.4 - -# 1.8.3 - 2025-08-29 -## What's Changed -* removed: remove entry points to pure parser by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1375 -* fixed: fixes match statements to work with PositionProvider by @imsut in https://github.com/Instagram/LibCST/pull/1389 - - -## New Contributors -* @hunterhogan made their first contribution in https://github.com/Instagram/LibCST/pull/1378 -* @thomas-serre-sonarsource made their first contribution in https://github.com/Instagram/LibCST/pull/1379 -* @imsut made their first contribution in https://github.com/Instagram/LibCST/pull/1389 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.3 - -# 1.8.2 - 2025-06-13 - -# Fixed -* fix(dependency): add back typing-extensions for 3.9 by @Lee-W in https://github.com/Instagram/LibCST/pull/1358 - -## New Contributors -* @Lee-W made their first contribution in https://github.com/Instagram/LibCST/pull/1358 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.1...v1.8.2 - -# 1.8.1 - 2025-06-10 - -## Added -* add helper to convert nodes to matchers by @zsol in https://github.com/Instagram/LibCST/pull/1351 - -## Updated -* Avoid raising bare Exception by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1168 -* Upgrade PyYAML-ft version and use new module name by @lysnikolaou in https://github.com/Instagram/LibCST/pull/1353 - -## New Contributors -* @lysnikolaou made their first contribution in https://github.com/Instagram/LibCST/pull/1353 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.0...v1.8.1 - -# 
1.8.0 - 2025-05-27 - -## Added -* Allow configuring empty formatter lists in codemod CLI by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1319 -* Publish several new binary wheels - * macos intel by @hadialqattan in https://github.com/Instagram/LibCST/pull/1316 - * windows arm64 by @zsol in https://github.com/Instagram/LibCST/pull/1304 - * 3.13 CPython free-threaded by @zsol in https://github.com/Instagram/LibCST/pull/1333 - * (only on [test.pypi.org](https://test.pypi.org/project/libcst/#history)) 3.14 and 3.14 CPython free-threaded by @amyreese and @zsol in https://github.com/Instagram/LibCST/pull/1345 and https://github.com/Instagram/LibCST/pull/1331 -* Enable support for free-threaded CPython by @zsol in https://github.com/Instagram/LibCST/pull/1295 and https://github.com/Instagram/LibCST/pull/1335 - -## Updated -* update pyo3 to 0.25 by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1324 -* Replace multiprocessing with ProcessPoolExecutor by @zsol in https://github.com/Instagram/LibCST/pull/1294 -* Support pipe syntax for Union types in codegen by @zsol in https://github.com/Instagram/LibCST/pull/1336 - -## New Contributors -* @hadialqattan made their first contribution in https://github.com/Instagram/LibCST/pull/1316 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.7.0...v1.8.0 - -# 1.7.0 - 2025-03-13 - -## Added -* add free-threaded CI by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1312 - -## Updated -* Remove dependency on `chic` and upgrade `annotate-snippets` by @zanieb in https://github.com/Instagram/LibCST/pull/1293 -* Update for Pyo3 0.23 by @ngoldbaum in https://github.com/Instagram/LibCST/pull/1289 -* Bump PyO3 to 0.23.5 by @mgorny in https://github.com/Instagram/LibCST/pull/1311 - -## New Contributors -* @zanieb made their first contribution in https://github.com/Instagram/LibCST/pull/1293 -* @ngoldbaum made their first contribution in https://github.com/Instagram/LibCST/pull/1289 -* @mgorny made their first contribution in https://github.com/Instagram/LibCST/pull/1311 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.6.0...v1.7.0 - -# 1.6.0 - 2025-01-09 - -## Fixed - -* rename: store state in scratch by @zsol in https://github.com/Instagram/LibCST/pull/1250 -* rename: handle imports via a parent module by @zsol in https://github.com/Instagram/LibCST/pull/1251 -* rename: Fix imports with aliases by @zsol in https://github.com/Instagram/LibCST/pull/1252 -* rename: don't leave trailing commas by @zsol in https://github.com/Instagram/LibCST/pull/1254 -* rename: don't eat commas unnecessarily by @zsol in https://github.com/Instagram/LibCST/pull/1256 -* rename: fix renaming toplevel names by @zsol in https://github.com/Instagram/LibCST/pull/1260 -* bump 3.12 to 3.13 in readme by @khameeteman in https://github.com/Instagram/LibCST/pull/1228 - -## Added - -* Add codemod to convert `typing.Union` to `|` by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1270 -* Add codemod to fix variadic callable annotations by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1269 -* Add codemod to rename typing aliases of builtins by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1267 -* Add typing classifier to pyproject.toml and badge to README by @yangdanny97 in https://github.com/Instagram/LibCST/pull/1272 -* Expose TypeAlias and TypeVar related structs in rust library by @Crozzers in https://github.com/Instagram/LibCST/pull/1274 - -## Updated -* Upgrade pyo3 to 0.22 by @jelmer in 
https://github.com/Instagram/LibCST/pull/1180 - -## New Contributors -* @yangdanny97 made their first contribution in https://github.com/Instagram/LibCST/pull/1270 -* @Crozzers made their first contribution in https://github.com/Instagram/LibCST/pull/1274 -* @jelmer made their first contribution in https://github.com/Instagram/LibCST/pull/1180 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.5.1...v1.6.0 - -# 1.5.1 - 2024-11-18 - -## Added - -* build wheels for musllinux by @MrMino in https://github.com/Instagram/LibCST/pull/1243 - -## New Contributors -* @MrMino made their first contribution in https://github.com/Instagram/LibCST/pull/1243 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.5.0...v1.5.1 - -# 1.5.0 - 2024-10-10 - -## Added -* FullyQualifiedNameProvider: Optionally consider pyproject.toml files when determining a file's module name and package by @camillol in https://github.com/Instagram/LibCST/pull/1148 -* Add validation for If node by @kiri11 in https://github.com/Instagram/LibCST/pull/1177 -* include python 3.13 in build by @khameeteman in https://github.com/Instagram/LibCST/pull/1203 - -## Fixed -* fix various Match statement visitation errors by @zsol in https://github.com/Instagram/LibCST/pull/1161 -* Mention codemod -x flag in docs by @kiri11 in https://github.com/Instagram/LibCST/pull/1169 -* Clear warnings for each file in codemod cli by @kiri11 in https://github.com/Instagram/LibCST/pull/1184 -* Typo fix in codemods_tutorial.rst (trivial) by @wimglenn in https://github.com/Instagram/LibCST/pull/1208 -* fix certain matchers breaking under multiprocessing by initializing them late by @kiri11 in https://github.com/Instagram/LibCST/pull/1204 - -## Updated -* make libcst_native::tokenizer public by @zsol in https://github.com/Instagram/LibCST/pull/1182 -* Use `license` instead of `license-file` by @michel-slm in https://github.com/Instagram/LibCST/pull/1189 -* Drop codecov from CI and readme by @amyreese in https://github.com/Instagram/LibCST/pull/1192 - - -## New Contributors -* @kiri11 made their first contribution in https://github.com/Instagram/LibCST/pull/1169 -* @grievejia made their first contribution in https://github.com/Instagram/LibCST/pull/1174 -* @michel-slm made their first contribution in https://github.com/Instagram/LibCST/pull/1189 -* @wimglenn made their first contribution in https://github.com/Instagram/LibCST/pull/1208 -* @khameeteman made their first contribution in https://github.com/Instagram/LibCST/pull/1203 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.4.0...v1.5.0 - -# 1.4.0 - 2024-05-22 - -## Fixed -* Fix Literal parse error in RemoveImportsVisitor by @camillol in https://github.com/Instagram/LibCST/pull/1130 -* Don't reset context.scratch between files by @zsol in https://github.com/Instagram/LibCST/pull/1151 -* Various documentation fixes - * Typo fix FullRepoManager by @kit1980 in https://github.com/Instagram/LibCST/pull/1138 - * ✏️ Fix tiny typo in `docs/source/metadata.rst` by @tiangolo in https://github.com/Instagram/LibCST/pull/1134 - * ✏️ Fix typo in `docs/source/scope_tutorial.ipynb` by @tiangolo in https://github.com/Instagram/LibCST/pull/1135 - * Update CONTRIBUTING.md by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1142 - -## Added - -* Add helper functions for common ways of filtering nodes by @zaicruvoir1rominet in https://github.com/Instagram/LibCST/pull/1137 -* Dump CST to .dot (graphviz) files by @zaicruvoir1rominet in 
https://github.com/Instagram/LibCST/pull/1147 -* Implement PEP-696 by @thereversiblewheel in https://github.com/Instagram/LibCST/pull/1141 - -## New Contributors -* @tiangolo made their first contribution in https://github.com/Instagram/LibCST/pull/1134 -* @camillol made their first contribution in https://github.com/Instagram/LibCST/pull/1130 -* @zaicruvoir1rominet made their first contribution in https://github.com/Instagram/LibCST/pull/1142 -* @thereversiblewheel made their first contribution in https://github.com/Instagram/LibCST/pull/1141 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.3.1...v1.4.0 - -# 1.3.1 - 2024-04-03 - -## Fixed -* ImportError due to missing `mypy_extensions` dependency by @zsol in https://github.com/Instagram/LibCST/pull/1128 - -# 1.3.0 - 2024-04-03 - -## Updated -* Removed dependencies on `typing_extensions` and `typing_inspect` by @zsol in https://github.com/Instagram/LibCST/pull/1126 - -# 1.2.0 - 2024-02-19 - -## Updated -* Support running LibCST on Python 3.12 and drop support for running it on 3.8 - * remove 3.8 support by @zsol in https://github.com/Instagram/LibCST/pull/1073 - * Remove reference to distutils by @zsol in https://github.com/Instagram/LibCST/pull/1099 - * Update pyproject.toml for Python 3.12 support by @itamaro in https://github.com/Instagram/LibCST/pull/1038 - -## Added -* Allow `Element::codegen` to be used by external users by @Wilfred in https://github.com/Instagram/LibCST/pull/1071 - -## Fixed -* Fix parsing list matchers without explicit brackets by @zsol in https://github.com/Instagram/LibCST/pull/1097 -* installing rustc/cargo for mybinder demo by @aleivag in https://github.com/Instagram/LibCST/pull/1083 -* fix filepathprovider generic type by @kinto0 in https://github.com/Instagram/LibCST/pull/1036 - -## New Contributors -* @itamaro made their first contribution in https://github.com/Instagram/LibCST/pull/1039 -* @kinto0 made their first contribution in https://github.com/Instagram/LibCST/pull/1036 -* @dtolnay made their first contribution in https://github.com/Instagram/LibCST/pull/1063 -* @anonymousdouble made their first contribution in https://github.com/Instagram/LibCST/pull/1082 -* @aleivag made their first contribution in https://github.com/Instagram/LibCST/pull/1083 -* @Wilfred made their first contribution in https://github.com/Instagram/LibCST/pull/1071 -* @diliop made their first contribution in https://github.com/Instagram/LibCST/pull/1106 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.1.0...v1.2.0 - -# 1.1.0 - 2023-10-05 - -## Added -* PEP 695 support - * parser: PEP 695 - Type Parameter Syntax #1004 - * Scope provider: support for type annotations #1014 -* PEP 701 support - * parser: support arbitrarily nested f-strings #1026 - * parser: Parse multiline expressions in f-strings #1027 -* parser: Support files with mixed newlines #1007 -* [libcst](https://crates.io/crates/libcst) is now published to crates.io - -## Fixed -* codemod/ApplyTypeAnnotationsVisitor: Do not annotate the same variable multiple times #956 -* parser: Don't swallow trailing whitespace #976 -* codemod/rename: Avoid duplicating import statements when the module name doesn't change #981 - -## Updated -* cli: Don't gather dirs ending .py #994 -* drop support for Python 3.7 #997 -* A few parser performance improvements: - * Switch to using thread_local regular expressions to stop mutext contention #996 - * Remove need for regex in TextPosition::matches #1002 - * Remove Regexes from whitespace parser #1008 - 
-# 1.0.1 - 2023-06-07 - -## Fixed -* Fix type of `evaluated_value` on string to allow bytes by @ljodal in https://github.com/Instagram/LibCST/pull/721 -* Fix Sentinal typo by @kit1980 in https://github.com/Instagram/LibCST/pull/948 -* Allow no whitespace after lambda body in certain cases by @zsol in https://github.com/Instagram/LibCST/pull/939 -* Fix whitespace, fstring, walrus related parse errors (#939, #938, #937, -#936, #935, #934, #933, #932, #931) by @zsol in https://github.com/Instagram/LibCST/pull/940 -* Codemod CLI: Print diff only when there is a change by @kit1980 in https://github.com/Instagram/LibCST/pull/945 - -## New Contributors -* @ljodal made their first contribution in https://github.com/Instagram/LibCST/pull/721 -* @kit1980 made their first contribution in https://github.com/Instagram/LibCST/pull/948 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.0.0...v1.0.1 - -# 1.0.0 - 2023-05-25 - -The first major release of LibCST is essentially the same as 0.4.10, but using the -newer, Rust-based parser implementation by default. The old, pure Python parser is -scheduled for removal in the next (non-patch) release. Until then, it is available with -the `LIBCST_PARSER_TYPE` environment variable set to `pure`. - -## Updated - -* Switch the default parser implementation to native by @zsol in https://github.com/Instagram/LibCST/pull/929 - -# 0.4.10 - 2023-05-23 - -## New Contributors -* @and-semakin made their first contribution in https://github.com/Instagram/LibCST/pull/816 -* @carljm made their first contribution in https://github.com/Instagram/LibCST/pull/828 -* @sagarbadiyani made their first contribution in https://github.com/Instagram/LibCST/pull/841 -* @podtserkovskiy made their first contribution in https://github.com/Instagram/LibCST/pull/894 -* @rchen152 made their first contribution in https://github.com/Instagram/LibCST/pull/903 -* @Kludex made their first contribution in https://github.com/Instagram/LibCST/pull/913 -* @jakkdl made their first contribution in https://github.com/Instagram/LibCST/pull/921 - -## Added -* Add py3.11 classifier by @and-semakin in https://github.com/Instagram/LibCST/pull/816 -* Script to regenerate test fixtures, upgrade to Pyre 0.9.10 by @amyreese in https://github.com/Instagram/LibCST/pull/872 -* Allow FullyQualifiedNameProvider to work with absolute paths by @amyreese in https://github.com/Instagram/LibCST/pull/867 -* Allow running codemods without configuring in YAML by @akx in https://github.com/Instagram/LibCST/pull/879 -* Support PEP 604 in ApplyTypeAnnotationsVisitor by @hauntsaninja in https://github.com/Instagram/LibCST/pull/868 - -## Fixed -* fix PEP 604 union annotations in decorators by @carljm in https://github.com/Instagram/LibCST/pull/828 -* [AddImportsVisitor] Docstring Check Only for the Top Element of the Body by @sagarbadiyani in https://github.com/Instagram/LibCST/pull/841 -* Fix [#855](https://github.com/Instagram/LibCST/issues/855) - fail to parse with statement by @stroxler in https://github.com/Instagram/LibCST/pull/861 -* Add setuptools-rust to build requirements in setup.py by @amyreese in https://github.com/Instagram/LibCST/pull/873 -* Relative imports from '' package are not allowed by @podtserkovskiy in https://github.com/Instagram/LibCST/pull/894 -* Use subprocess.DEVNULL instead of opening os.devnull by hand by @akx in https://github.com/Instagram/LibCST/pull/897 -* Ensure current Python interpreter is used for subprocesses by @akx in https://github.com/Instagram/LibCST/pull/898 -* Fix 
ApplyTypeAnnotationsVisitor behavior on attribute assignments. by @rchen152 in https://github.com/Instagram/LibCST/pull/903 -* Fix spelling and grammar in some comments by @stroxler in https://github.com/Instagram/LibCST/pull/908 -* skip escaped backslash in rf-string by @jakkdl in https://github.com/Instagram/LibCST/pull/921 -* relax validation rules on decorators by @jakkdl in https://github.com/Instagram/LibCST/pull/926 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.9...v0.4.10 - -# 0.4.9 - 2022-11-10 - -## Updated -* Bump setuptools-rust version by @zsol in https://github.com/Instagram/LibCST/pull/809 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.8...v0.4.9 - -# 0.4.8 - 2022-11-10 - -## New Contributors -* @dhruvmanila made their first contribution in https://github.com/Instagram/LibCST/pull/728 -* @vfazio made their first contribution in https://github.com/Instagram/LibCST/pull/801 -* @matthewshaer made their first contribution in https://github.com/Instagram/LibCST/pull/807 - - -## Fixed -* Fix parse error message for number parsing by @zzl0 in https://github.com/Instagram/LibCST/pull/724 -* Fix problematic doc build, due to the new builder image provided by readthedocs doesn't has the `graphviz-dev` package pre-installed any more by @MapleCCC in https://github.com/Instagram/LibCST/pull/751 -* Fix docstring of `FullRepoManager` by @MapleCCC in https://github.com/Instagram/LibCST/pull/750 -* Fix bug when `TypeOf` is one of options in `OneOf` / `AllOf` by @MapleCCC in https://github.com/Instagram/LibCST/pull/756 -* Tighten the metadata type of `ExpressionContextProvider` by @MapleCCC in https://github.com/Instagram/LibCST/pull/760 -* Fix the bug that the use of formatter in codemods has undetermined target Python version, resulting in hard-to-reason-with behavior by @MapleCCC in https://github.com/Instagram/LibCST/pull/771 - - -## Added -* Python 3.11 rutime support - * test using python 3.11 beta versions by @zsol in https://github.com/Instagram/LibCST/pull/723 - * Python 3.11 wheels by @vfazio in https://github.com/Instagram/LibCST/pull/801 -* Raise informative exception when metadata is unresolved in a metadata-based match by @MapleCCC in https://github.com/Instagram/LibCST/pull/757 -* Add AccessorProvider by @matthewshaer in https://github.com/Instagram/LibCST/pull/807 - -**Full Changelog**: https://github.com/Instagram/LibCST/compare/v0.4.7...v0.4.8 - -# 0.4.7 - 2022-07-12 - -## New Contributors -* @Chenguang-Zhu made their first contribution in https://github.com/Instagram/LibCST/pull/720 - -## Fixed -* Fix get_qualified_names_for matching on prefixes of the given name by @lpetre in https://github.com/Instagram/LibCST/pull/719 - -## Added -* Implement lazy loading mechanism for expensive metadata providers by @Chenguang-Zhu in https://github.com/Instagram/LibCST/pull/720 - -# 0.4.6 - 2022-07-04 - -## New Contributors -- @superbobry made their first contribution in https://github.com/Instagram/LibCST/pull/702 - -## Fixed -- convert_type_comments now preserves comments following type comments by @superbobry in https://github.com/Instagram/LibCST/pull/702 -- QualifiedNameProvider optimizations - - Cache the scope name prefix to prevent scope traversal in a tight loop by @lpetre in https://github.com/Instagram/LibCST/pull/708 - - Faster qualified name formatting by @lpetre in https://github.com/Instagram/LibCST/pull/710 - - Prevent unnecessary work in Scope.get_qualified_names_for_ by @lpetre in 
https://github.com/Instagram/LibCST/pull/709 -- Fix parsing of parenthesized empty tuples by @zsol in https://github.com/Instagram/LibCST/pull/712 -- Support whitespace after ParamSlash by @zsol in https://github.com/Instagram/LibCST/pull/713 -- [parser] bail on deeply nested expressions by @zsol in https://github.com/Instagram/LibCST/pull/718 - -# 0.4.5 - 2022-06-17 - -## New Contributors - -- @zzl0 made their first contribution in https://github.com/Instagram/LibCST/pull/704 - -## Fixed - -- Only skip supported escaped characters in f-strings by @zsol in https://github.com/Instagram/LibCST/pull/700 -- Escaping quote characters in raw string literals causes a tokenizer error by @zsol in https://github.com/Instagram/LibCST/issues/668 -- Corrected a code example in the documentation by @zzl0 in https://github.com/Instagram/LibCST/pull/703 -- Handle multiline strings that start with quotes by @zzl0 in https://github.com/Instagram/LibCST/pull/704 -- Fixed a performance regression in libcst.metadata.ScopeProvider by @lpetre in https://github.com/Instagram/LibCST/pull/698 - -# 0.4.4 - 2022-06-13 - -## New Contributors - -- @adamchainz made their first contribution in https://github.com/Instagram/LibCST/pull/688 - -## Added - -- Add package links to PyPI by @adamchainz in https://github.com/Instagram/LibCST/pull/688 -- native: add overall benchmark by @zsol in https://github.com/Instagram/LibCST/pull/692 -- Add support for PEP-646 by @zsol in https://github.com/Instagram/LibCST/pull/696 - -## Updated - -- parser: use references instead of smart pointers for Tokens by @zsol in https://github.com/Instagram/LibCST/pull/691 - -# 0.4.3 - 2022-05-11 - -## Fixed - -- Restore the 0.4.1 behavior for libcst.helpers.get_absolute_module by @lpetre in https://github.com/Instagram/LibCST/pull/684 - -# 0.4.2 - 2022-05-04 - -## New Contributors - -- @stanislavlevin made their first contribution in https://github.com/Instagram/LibCST/pull/650 -- @dmitryvinn made their first contribution in https://github.com/Instagram/LibCST/pull/655 -- @wiyr made their first contribution in https://github.com/Instagram/LibCST/pull/669 -- @toofar made their first contribution in https://github.com/Instagram/LibCST/pull/675 - -## Fixed - -- native: Avoid crashing by making IntoPy conversion fallible by @zsol in https://github.com/Instagram/LibCST/pull/639 -- native: make sure ParserError's line is zero-indexed by @zsol in https://github.com/Instagram/LibCST/pull/681 -- Fix space validation for AsName and Await by @zsol in https://github.com/Instagram/LibCST/pull/641 -- Qualified Name Provider: Fix returned qname for symbols that are prefixes of each other by @wiyr in https://github.com/Instagram/LibCST/pull/669 -- Rename Codemod: Correct last renamed import from by @toofar in https://github.com/Instagram/LibCST/pull/675 -- Many changes to the Apply Type Comments codemod: - - Allow for skipping quotes when applying type comments by @stroxler in https://github.com/Instagram/LibCST/pull/644 - - Port pyre fixes by @stroxler in https://github.com/Instagram/LibCST/pull/651 - - Preserve as-imports when merging type annotations. by @martindemello in https://github.com/Instagram/LibCST/pull/664 - - Qualify imported symbols when the dequalified form would cause a conflict by @martindemello in https://github.com/Instagram/LibCST/pull/674 - - Add an argument to always qualify imported type annotations. 
by @martindemello in https://github.com/Instagram/LibCST/pull/676 - -## Added - -- Create an AddTrailingCommas codemod by @stroxler in https://github.com/Instagram/LibCST/pull/643 -- Define gather global names visitor by @shannonzhu in https://github.com/Instagram/LibCST/pull/657 - -## Updated - -- Support module and package names in the codemod context by @lpetre in https://github.com/Instagram/LibCST/pull/662 -- Drop support for running libcst using a python 3.6 interpreter by @lpetre in https://github.com/Instagram/LibCST/pull/663 -- Update relative import logic to match cpython by @lpetre in https://github.com/Instagram/LibCST/pull/660 -- Scope Provider: Consider access information when computing qualified names for nodes by @lpetre in https://github.com/Instagram/LibCST/pull/682 - -# 0.4.1 - 2022-01-28 - -## New Contributors - -- @ariebovenberg made their first contribution in https://github.com/Instagram/LibCST/pull/605 -- @sehz made their first contribution in https://github.com/Instagram/LibCST/pull/598 - -## Added - -- Add docs about the native parts by @zsol in https://github.com/Instagram/LibCST/pull/601 -- Specify minimum rust toolchain version by @zsol in https://github.com/Instagram/LibCST/pull/614 -- build wheels on main branch for linux/arm64 by @zsol in https://github.com/Instagram/LibCST/pull/630 - -## Updated - -- ApplyTypeAnnotationVisitor changes - - Add support for methods with func type comment excluding self/cls by @stroxler in https://github.com/Instagram/LibCST/pull/622 - - Merge in TypeVars and Generic base classes in ApplyTypeAnnotationVisitor by @martindemello in https://github.com/Instagram/LibCST/pull/596 - - Full handling for applying type comments to Assign by @stroxler in https://github.com/Instagram/LibCST/pull/599 - - Add support for For and With by @stroxler in https://github.com/Instagram/LibCST/pull/607 - - Support FunctionDef transformations by @stroxler in https://github.com/Instagram/LibCST/pull/610 -- change pyo3 as optional dependency in native Python Parser by @sehz in https://github.com/Instagram/LibCST/pull/598 -- add slots to base classes, @add_slots takes bases into account by @ariebovenberg in https://github.com/Instagram/LibCST/pull/605 -- [native] Box most enums by @zsol in https://github.com/Instagram/LibCST/pull/632 -- [native] Return tuples instead of lists in CST nodes by @zsol in https://github.com/Instagram/LibCST/pull/631 - -## Fixed - -- Allow trailing whitespace without newline at EOF by @zsol in https://github.com/Instagram/LibCST/pull/611 -- Handle ast.parse failures when converting function type comments by @stroxler in https://github.com/Instagram/LibCST/pull/616 -- [native] Don't redundantly nest StarredElement inside another Element by @isidentical in https://github.com/Instagram/LibCST/pull/624 -- [native] Allow unparenthesized tuples inside f-strings by @isidentical in https://github.com/Instagram/LibCST/pull/621 -- Don't require whitespace right after match by @isidentical in https://github.com/Instagram/LibCST/pull/628 -- Proxy both parentheses in some pattern matching nodes by @isidentical in https://github.com/Instagram/LibCST/pull/626 - # 0.4.0 - 2022-01-12 This release contains a new parsing infrastructure that is turned off by default. You @@ -497,575 +9,522 @@ Note: the new parser is built as a native extension, so LibCST will ship with bi wheels from now on. 
## Added - -- Implement a Python PEG parser in Rust by @zsol in [#566](https://github.com/Instagram/LibCST/pull/566) -- implement PEP-654: except\* by @zsol in [#571](https://github.com/Instagram/LibCST/pull/571) -- Implement PEP-634 - Match statement by @zsol in [#568](https://github.com/Instagram/LibCST/pull/568) -- Add instructions to codegen test failures by @stroxler in [#582](https://github.com/Instagram/LibCST/pull/582) -- Support Parenthesized With Statements by @stroxler in [#584](https://github.com/Instagram/LibCST/pull/584) -- Support relative imports in AddImportsVisitor by @martindemello in [#585](https://github.com/Instagram/LibCST/pull/585) -- Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign by @stroxler in [#594](https://github.com/Instagram/LibCST/pull/594) +* Implement a Python PEG parser in Rust by @zsol in [#566](https://github.com/Instagram/LibCST/pull/566) +* implement PEP-654: except* by @zsol in [#571](https://github.com/Instagram/LibCST/pull/571) +* Implement PEP-634 - Match statement by @zsol in [#568](https://github.com/Instagram/LibCST/pull/568) +* Add instructions to codegen test failures by @stroxler in [#582](https://github.com/Instagram/LibCST/pull/582) +* Support Parenthesized With Statements by @stroxler in [#584](https://github.com/Instagram/LibCST/pull/584) +* Support relative imports in AddImportsVisitor by @martindemello in [#585](https://github.com/Instagram/LibCST/pull/585) +* Codemod for PEP 484 Assign w / type comments -> PEP 526 AnnAssign by @stroxler in [#594](https://github.com/Instagram/LibCST/pull/594) ## Updated - -- Update license headers by @zsol in [#560](https://github.com/Instagram/LibCST/pull/560) -- Use precise signature matching when inserting function type annotations by @martindemello in [#591](https://github.com/Instagram/LibCST/pull/591) +* Update license headers by @zsol in [#560](https://github.com/Instagram/LibCST/pull/560) +* Use precise signature matching when inserting function type annotations by @martindemello in [#591](https://github.com/Instagram/LibCST/pull/591) # 0.3.23 - 2021-11-23 ## Fixed - -- Fix missing string annotation references [#561](https://github.com/Instagram/LibCST/pull/561) +- Fix missing string annotation references [#561](https://github.com/Instagram/LibCST/pull/561) # 0.3.22 - 2021-11-22 ## Added - -- Add --indent-string option to `libcst.tool print` [#525](https://github.com/Instagram/LibCST/pull/525) -- Publish pre-release packages to test.pypi.org [#550](https://github.com/Instagram/LibCST/pull/550) -- Add ImportAssignment class extending Assignment to record assignments for import statements [#554](https://github.com/Instagram/LibCST/pull/554) +- Add --indent-string option to `libcst.tool print` [#525](https://github.com/Instagram/LibCST/pull/525) +- Publish pre-release packages to test.pypi.org [#550](https://github.com/Instagram/LibCST/pull/550) +- Add ImportAssignment class extending Assignment to record assignments for import statements [#554](https://github.com/Instagram/LibCST/pull/554) ## Fixed - -- Various documentation fixes [#527](https://github.com/Instagram/LibCST/pull/527), [#529](https://github.com/Instagram/LibCST/pull/529) -- Do not add imports if we added no type info in ApplyTypeAnnotationVisitor [(commit)](https://github.com/Instagram/LibCST/commit/87625d02b6cb321c9c29ba1c67d81ce954a1a396) -- Support relative imports in ApplyTypeAnnotationVisitor qualifier handling [#538](https://github.com/Instagram/LibCST/pull/538) -- Don't gather metadata if the wrapper 
already contains it [#545](https://github.com/Instagram/LibCST/pull/545) -- Swallow parsing errors in string annotations [#548](https://github.com/Instagram/LibCST/pull/548) -- Stop parsing string annotations when no longer in a typing call [#546](https://github.com/Instagram/LibCST/pull/546) +- Various documentation fixes [#527](https://github.com/Instagram/LibCST/pull/527), [#529](https://github.com/Instagram/LibCST/pull/529) +- Do not add imports if we added no type info in ApplyTypeAnnotationVisitor [(commit)](https://github.com/Instagram/LibCST/commit/87625d02b6cb321c9c29ba1c67d81ce954a1a396) +- Support relative imports in ApplyTypeAnnotationVisitor qualifier handling [#538](https://github.com/Instagram/LibCST/pull/538) +- Don't gather metadata if the wrapper already contains it [#545](https://github.com/Instagram/LibCST/pull/545) +- Swallow parsing errors in string annotations [#548](https://github.com/Instagram/LibCST/pull/548) +- Stop parsing string annotations when no longer in a typing call [#546](https://github.com/Instagram/LibCST/pull/546) ## Updated - -- Move find_qualified_names_for in the Assignment class [#557](https://github.com/Instagram/LibCST/pull/557) +- Move find_qualified_names_for in the Assignment class [#557](https://github.com/Instagram/LibCST/pull/557) # 0.3.21 - 2021-09-21 ## Fixed - -- Fix pyre command for type inference provider [#523](https://github.com/Instagram/LibCST/pull/523) +- Fix pyre command for type inference provider [#523](https://github.com/Instagram/LibCST/pull/523) ## Updated - -- Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same [#508]((https://github.com/Instagram/LibCST/pull/508) -- Rewrite the MatchIfTrue type to be generic on \_MatchIfTrueT [#512](https://github.com/Instagram/LibCST/pull/512) -- Add python3.9 to the CI [#506](https://github.com/Instagram/LibCST/pull/506) -- Various CI changes [#471](https://github.com/Instagram/LibCST/pull/471) [#510](https://github.com/Instagram/LibCST/pull/510) [#505](https://github.com/Instagram/LibCST/pull/505) [#515](https://github.com/Instagram/LibCST/pull/515) [#516](https://github.com/Instagram/LibCST/pull/516) +- Change codegen to treat typing.Union[Foo, NoneType] and typing.Optional[Foo] as the same [#508]((https://github.com/Instagram/LibCST/pull/508) +- Rewrite the MatchIfTrue type to be generic on _MatchIfTrueT [#512](https://github.com/Instagram/LibCST/pull/512) +- Add python3.9 to the CI [#506](https://github.com/Instagram/LibCST/pull/506) +- Various CI changes [#471](https://github.com/Instagram/LibCST/pull/471) [#510](https://github.com/Instagram/LibCST/pull/510) [#505](https://github.com/Instagram/LibCST/pull/505) [#515](https://github.com/Instagram/LibCST/pull/515) [#516](https://github.com/Instagram/LibCST/pull/516) # 0.3.20 - 2021-08-09 ## Fixed - -- Don't reset subprocess environment to fix codemodding on windows [#495](https://github.com/Instagram/LibCST/pull/495) -- TypeAnnotationsVisitor: don't truncate function return type [#499](https://github.com/Instagram/LibCST/pull/499) -- Docs: Fix typo [#492](https://github.com/Instagram/LibCST/pull/492) +- Don't reset subprocess environment to fix codemodding on windows [#495](https://github.com/Instagram/LibCST/pull/495) +- TypeAnnotationsVisitor: don't truncate function return type [#499](https://github.com/Instagram/LibCST/pull/499) +- Docs: Fix typo [#492](https://github.com/Instagram/LibCST/pull/492) # 0.3.19 - 2021-05-12 # Updated - -- Return more specific QNames for assignments 
[#477](https://github.com/Instagram/LibCST/pull/477) -- Tie accesses from string annotation to the string node [#483](https://github.com/Instagram/LibCST/pull/483) - +- Return more specific QNames for assignments [#477](https://github.com/Instagram/LibCST/pull/477) +- Tie accesses from string annotation to the string node [#483](https://github.com/Instagram/LibCST/pull/483) ## Fixed - -- Fix leaking processes from TypeInferenceProvider [#474](https://github.com/Instagram/LibCST/pull/474) -- Fix TypeInferenceProvider breakage with empty cache [#476](https://github.com/Instagram/LibCST/pull/476) -- Fix formatting for link to QualifiedName class in docs [#480](https://github.com/Instagram/LibCST/pull/480) +- Fix leaking processes from TypeInferenceProvider [#474](https://github.com/Instagram/LibCST/pull/474) +- Fix TypeInferenceProvider breakage with empty cache [#476](https://github.com/Instagram/LibCST/pull/476) +- Fix formatting for link to QualifiedName class in docs [#480](https://github.com/Instagram/LibCST/pull/480) # 0.3.18 - 2021-03-29 ## Added - -- Add FlattenSentinel to support replacing a statement with multiple statements [#455](https://github.com/Instagram/LibCST/pull/455) -- Add BuiltinScope [#469](https://github.com/Instagram/LibCST/pull/469) -- Add FullyQualifiedNameProvider [#465](https://github.com/Instagram/LibCST/pull/465) +- Add FlattenSentinel to support replacing a statement with multiple statements [#455](https://github.com/Instagram/LibCST/pull/455) +- Add BuiltinScope [#469](https://github.com/Instagram/LibCST/pull/469) +- Add FullyQualifiedNameProvider [#465](https://github.com/Instagram/LibCST/pull/465) ## Updated - -- Split QualifiedNameProvider out from libcst.metadata.scope_provider [#464](https://github.com/Instagram/LibCST/pull/464) +- Split QualifiedNameProvider out from libcst.metadata.scope_provider [#464](https://github.com/Instagram/LibCST/pull/464) ## Fixed - -- Exception while parsing escape character in raw f-strings [#462](https://github.com/Instagram/LibCST/issues/462) - +- Exception while parsing escape character in raw f-strings [#462](https://github.com/Instagram/LibCST/issues/462) # 0.3.17 - 2021-02-08 ## Updated - -- Optimization: reduce the number of unused parallel processes [#440](https://github.com/Instagram/LibCST/pull/440) +- Optimization: reduce the number of unused parallel processes [#440](https://github.com/Instagram/LibCST/pull/440) ## Fixed - -- Walrus operator's left hand side now has STORE expression context [#443](https://github.com/Instagram/LibCST/pull/433) -- ApplyTypeAnnotationsVisitor applies parameter annotations even if no return type is declared [#445](https://github.com/Instagram/LibCST/pull/445) -- Work around Windows problem by using dummy pool for `jobs=1` [#436](https://github.com/Instagram/LibCST/pull/436) -- Remove extra unused imports added in other files [#453](https://github.com/Instagram/LibCST/pull/453) +- Walrus operator's left hand side now has STORE expression context [#443](https://github.com/Instagram/LibCST/pull/433) +- ApplyTypeAnnotationsVisitor applies parameter annotations even if no return type is declared [#445](https://github.com/Instagram/LibCST/pull/445) +- Work around Windows problem by using dummy pool for `jobs=1` [#436](https://github.com/Instagram/LibCST/pull/436) +- Remove extra unused imports added in other files [#453](https://github.com/Instagram/LibCST/pull/453) # 0.3.16 - 2020-12-16 ## Added - -- Support PEP-604 style unions in decorator annotations 
[#429](https://github.com/Instagram/LibCST/pull/429) -- Gathering exports in augmented assignment statements [#426](https://github.com/Instagram/LibCST/pull/426) +- Support PEP-604 style unions in decorator annotations [#429](https://github.com/Instagram/LibCST/pull/429) +- Gathering exports in augmented assignment statements [#426](https://github.com/Instagram/LibCST/pull/426) ## Fixed - -- Don't allow out of order accesses in the global scope [#431](https://github.com/Instagram/LibCST/pull/431) -- Handle scope ordering in For statements [#430](https://github.com/Instagram/LibCST/pull/430) -- Fix for not parsing subscripts such as `cast()["from"]` [#428](https://github.com/Instagram/LibCST/pull/428) -- Walrus operator's left hand side now has STORE expression context [#433](https://github.com/Instagram/LibCST/pull/433) +- Don't allow out of order accesses in the global scope [#431](https://github.com/Instagram/LibCST/pull/431) +- Handle scope ordering in For statements [#430](https://github.com/Instagram/LibCST/pull/430) +- Fix for not parsing subscripts such as `cast()["from"]` [#428](https://github.com/Instagram/LibCST/pull/428) +- Walrus operator's left hand side now has STORE expression context [#433](https://github.com/Instagram/LibCST/pull/433) # 0.3.15 - 2020-12-01 ## Added - -- Support Named Unicode Characters and yield in f-strings [#424](https://github.com/Instagram/LibCST/pull/424) +- Support Named Unicode Characters and yield in f-strings [#424](https://github.com/Instagram/LibCST/pull/424) ## Fixed - -- Assignment/access ordering in comprehensions [#423](https://github.com/Instagram/LibCST/pull/423) -- Referencing of remaining objects in cast() [#422](https://github.com/Instagram/LibCST/pull/422) +- Assignment/access ordering in comprehensions [#423](https://github.com/Instagram/LibCST/pull/423) +- Referencing of remaining objects in cast() [#422](https://github.com/Instagram/LibCST/pull/422) # 0.3.14 - 2020-11-18 ## Fixed - -- Fix is_annotation for types used in classdef base and assign value [#406](https://github.com/Instagram/LibCST/pull/406) -- Visit concatenated f-strings during scope analysis [#411](https://github.com/Instagram/LibCST/pull/411) -- Correct handling of walrus operator in function args [#417](https://github.com/Instagram/LibCST/pull/417) -- Allow generator expressions in f-strings [#419](https://github.com/Instagram/LibCST/pull/419) -- Keep track of assignment/access ordering during scope analysis [#413](https://github.com/Instagram/LibCST/pull/413) -- Handle string type references in cast() during scope analysis [#418](https://github.com/Instagram/LibCST/pull/418) +- Fix is_annotation for types used in classdef base and assign value [#406](https://github.com/Instagram/LibCST/pull/406) +- Visit concatenated f-strings during scope analysis [#411](https://github.com/Instagram/LibCST/pull/411) +- Correct handling of walrus operator in function args [#417](https://github.com/Instagram/LibCST/pull/417) +- Allow generator expressions in f-strings [#419](https://github.com/Instagram/LibCST/pull/419) +- Keep track of assignment/access ordering during scope analysis [#413](https://github.com/Instagram/LibCST/pull/413) +- Handle string type references in cast() during scope analysis [#418](https://github.com/Instagram/LibCST/pull/418) # 0.3.13 - 2020-10-12 ## Fixed - -- Use correct type for AugAssign and AnnAssign target [#396](https://github.com/Instagram/LibCST/pull/396) -- Support string annotations for type aliases 
[#401](https://github.com/Instagram/LibCST/pull/401) +- Use correct type for AugAssign and AnnAssign target [#396](https://github.com/Instagram/LibCST/pull/396) +- Support string annotations for type aliases [#401](https://github.com/Instagram/LibCST/pull/401) # 0.3.12 - 2020-10-01 ## Fixed - -- fix RemoveImportsVisitor crash when ImportAlias is inserted without comma [#397](https://github.com/Instagram/LibCST/pull/397) -- Provide STORE for {Class,Function}Def.name in ExpressionContextProvider [#394](https://github.com/Instagram/LibCST/pull/394) +- fix RemoveImportsVisitor crash when ImportAlias is inserted without comma [#397](https://github.com/Instagram/LibCST/pull/397) +- Provide STORE for {Class,Function}Def.name in ExpressionContextProvider [#394](https://github.com/Instagram/LibCST/pull/394) # 0.3.11 - 2020-09-29 ## Added - -- Implement TypeOf matcher [#384](https://github.com/Instagram/LibCST/pull/384) +- Implement TypeOf matcher [#384](https://github.com/Instagram/LibCST/pull/384) ## Updated - -- Update return type of ParentNodeProvider to be CSTNode [#377](https://github.com/Instagram/LibCST/pull/377) -- Add source code links to each class/function [#378](https://github.com/Instagram/LibCST/pull/378) +- Update return type of ParentNodeProvider to be CSTNode [#377](https://github.com/Instagram/LibCST/pull/377) +- Add source code links to each class/function [#378](https://github.com/Instagram/LibCST/pull/378) ## Fixed - -- Removing an import alias with a trailing standalone comment should preserve the comment [#392](https://github.com/Instagram/LibCST/pull/392) +- Removing an import alias with a trailing standalone comment should preserve the comment [#392](https://github.com/Instagram/LibCST/pull/392) # 0.3.10 - 2020-09-17 ## Added - -- Handle string annotations in ScopeProvider [#373](https://github.com/Instagram/LibCST/pull/373) -- Add is_annotation subtype for Access inreferences. [#372](https://github.com/Instagram/LibCST/pull/372) +- Handle string annotations in ScopeProvider [#373](https://github.com/Instagram/LibCST/pull/373) +- Add is_annotation subtype for Access inreferences. 
[#372](https://github.com/Instagram/LibCST/pull/372) ## Updated - -- Call pyre query with noninteractive logging [#371](https://github.com/Instagram/LibCST/pull/371) -- Replace matchers with explicit visitation in gatherers [#366](https://github.com/Instagram/LibCST/pull/366) -- Include missing test data in install [#365](https://github.com/Instagram/LibCST/pull/365) +- Call pyre query with noninteractive logging [#371](https://github.com/Instagram/LibCST/pull/371) +- Replace matchers with explicit visitation in gatherers [#366](https://github.com/Instagram/LibCST/pull/366) +- Include missing test data in install [#365](https://github.com/Instagram/LibCST/pull/365) ## Fixed - -- Spaces around walrus operator are not required [#368](https://github.com/Instagram/LibCST/pull/368) -- SaveMatchedNode now matches with trailing empty wildcards [#356](https://github.com/Instagram/LibCST/pull/356) -- Correctly extract wildcard matchers [#355](https://github.com/Instagram/LibCST/pull/355) +- Spaces around walrus operator are not required [#368](https://github.com/Instagram/LibCST/pull/368) +- SaveMatchedNode now matches with trailing empty wildcards [#356](https://github.com/Instagram/LibCST/pull/356) +- Correctly extract wildcard matchers [#355](https://github.com/Instagram/LibCST/pull/355) # 0.3.9 - 2020-09-07 ## Added - -- Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/pull/353) -- Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/pull/350) -- Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/pull/349) + - Support string type annotations in RemoveUnusedImports [#353](https://github.com/Instagram/LibCST/pull/353) + - Add scope to ImportAlias [#350](https://github.com/Instagram/LibCST/pull/350) + - Add scope to ClassDef [#349](https://github.com/Instagram/LibCST/pull/349) ## Fixed - -- Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/pull/360) -- Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/pull/362) + - Fixed all pyre related errors [#360](https://github.com/Instagram/LibCST/pull/360) + - Fixed enclosing attribute for attributes in call arguments [#362](https://github.com/Instagram/LibCST/pull/362) # 0.3.8 - 2020-07-22 ## Added - -- Handle type subscripts when applying annotations. [#335](https://github.com/Instagram/LibCST/pull/335) -- Added FullRepoManager `cache` property [#330](https://github.com/Instagram/LibCST/pull/330) -- Added optional args for tox commands [#327](https://github.com/Instagram/LibCST/pull/327) + - Handle type subscripts when applying annotations.
[#335](https://github.com/Instagram/LibCST/pull/335) + - Added FullRepoManager `cache` property [#330](https://github.com/Instagram/LibCST/pull/330) + - Added optional args for tox commands [#327](https://github.com/Instagram/LibCST/pull/327) ## Updated - -- Only remove trailing comma if the last alias is removed [#334](https://github.com/Instagram/LibCST/pull/334) + - Only remove trailing comma if the last alias is removed [#334](https://github.com/Instagram/LibCST/pull/334) ## Fixed - -- Fixed inserting imports after module docstring [#343](https://github.com/Instagram/LibCST/pull/343) -- Fixed ParenthesizedWhitespace before params in FuncDef [#342](https://github.com/Instagram/LibCST/pull/342) -- Fixed validation for ImportAlias and Try statements [#340](https://github.com/Instagram/LibCST/pull/340) -- Fixed NotEqual position issue [#325](https://github.com/Instagram/LibCST/pull/325) -- Fixed minor typo in scope_provider.py [#324](https://github.com/Instagram/LibCST/pull/324) + - Fixed inserting imports after module docstring [#343](https://github.com/Instagram/LibCST/pull/343) + - Fixed ParenthesizedWhitespace before params in FuncDef [#342](https://github.com/Instagram/LibCST/pull/342) + - Fixed validation for ImportAlias and Try statements [#340](https://github.com/Instagram/LibCST/pull/340) + - Fixed NotEqual position issue [#325](https://github.com/Instagram/LibCST/pull/325) + - Fixed minor typo in scope_provider.py [#324](https://github.com/Instagram/LibCST/pull/324) # 0.3.7 - 2020-06-24 ## Added - -- Added `RenameCommand` to rename all instances of a local or imported object to a specified new name. [#308](https://github.com/Instagram/LibCST/pull/308) + - Added `RenameCommand` to rename all instances of a local or imported object to a specified new name. [#308](https://github.com/Instagram/LibCST/pull/308) ## Updated - -- Upgraded Codecov dev dependency to 2.1.4. [#311](https://github.com/Instagram/LibCST/pull/311) -- Enabled Pyre `strict` mode by default. [#313](https://github.com/Instagram/LibCST/pull/313) + - Upgraded Codecov dev dependency to 2.1.4. [#311](https://github.com/Instagram/LibCST/pull/311) + - Enabled Pyre `strict` mode by default. [#313](https://github.com/Instagram/LibCST/pull/313) ## Fixed - -- Fixed `ImportError` under Python 3.9. [#306](https://github.com/Instagram/LibCST/pull/306) -- Fixed `stdout` being plugged into successfully codemod-ed files. [#309](https://github.com/Instagram/LibCST/pull/309) -- Fixed `QualifiedName` retrieval for names with repeated substrings. [#312](https://github.com/Instagram/LibCST/pull/312) -- Fixed default values of keyword-only and positional-only arguments in `ApplyTypeAnnotationsVisitor`. [#314](https://github.com/Instagram/LibCST/pull/314) -- Fixed `ExpressionContextProvider` by giving subscript values a `LOAD`context. [#319](https://github.com/Instagram/LibCST/pull/319) + - Fixed `ImportError` under Python 3.9. [#306](https://github.com/Instagram/LibCST/pull/306) + - Fixed `stdout` being plugged into successfully codemod-ed files. [#309](https://github.com/Instagram/LibCST/pull/309) + - Fixed `QualifiedName` retrieval for names with repeated substrings. [#312](https://github.com/Instagram/LibCST/pull/312) + - Fixed default values of keyword-only and positional-only arguments in `ApplyTypeAnnotationsVisitor`. [#314](https://github.com/Instagram/LibCST/pull/314) + - Fixed `ExpressionContextProvider` by giving subscript values a `LOAD`context. 
[#319](https://github.com/Instagram/LibCST/pull/319) # 0.3.6 - 2020-05-27 ## Added - -- Added `ConvertNamedTupleToDataclassCommand` to convert `NamedTuple` class declarations to Python 3.7 `dataclasses` using the `@dataclass(frozen=True)` decorator. [#299](https://github.com/Instagram/LibCST/pull/299) + - Added `ConvertNamedTupleToDataclassCommand` to convert `NamedTuple` class declarations to Python 3.7 `dataclasses` using the `@dataclass(frozen=True)` decorator. [#299](https://github.com/Instagram/LibCST/pull/299) ## Fixed - -- Fixed typo in file name `libcst/codemod/commands/convert_percent_format_to_fstring.py`. [#301](https://github.com/Instagram/LibCST/pull/301) -- Fixed `StopIteration` exception during scope analysis matching on import names. [#302](https://github.com/Instagram/LibCST/pull/302) + - Fixed typo in file name `libcst/codemod/commands/convert_percent_format_to_fstring.py`. [#301](https://github.com/Instagram/LibCST/pull/301) + - Fixed `StopIteration` exception during scope analysis matching on import names. [#302](https://github.com/Instagram/LibCST/pull/302) # 0.3.5 - 2020-05-12 ## Updated - -- Expose more granular `Assignments` and `Accesses` for dotted imports in `ScopeProvider`. [#284](https://github.com/Instagram/LibCST/pull/284) -- `get_qualified_names_for` returns the most appropriate qualified name. [#290](https://github.com/Instagram/LibCST/pull/290) -- Surface `SyntaxError` raised by formatter in codemod run. [#288](https://github.com/Instagram/LibCST/pull/288) [#289](https://github.com/Instagram/LibCST/pull/289) -- Rename `ApplyTypeAnnotationsVisitor.add_stub_to_context` as `ApplyTypeAnnotationsVisitor.store_stub_in_context` and add `overwrite_existing_annotations` to allow overwrite existing type annotations. [#289](https://github.com/Instagram/LibCST/pull/291) + - Expose more granular `Assignments` and `Accesses` for dotted imports in `ScopeProvider`. [#284](https://github.com/Instagram/LibCST/pull/284) + - `get_qualified_names_for` returns the most appropriate qualified name. [#290](https://github.com/Instagram/LibCST/pull/290) + - Surface `SyntaxError` raised by formatter in codemod run. [#288](https://github.com/Instagram/LibCST/pull/288) [#289](https://github.com/Instagram/LibCST/pull/289) + - Rename `ApplyTypeAnnotationsVisitor.add_stub_to_context` as `ApplyTypeAnnotationsVisitor.store_stub_in_context` and add `overwrite_existing_annotations` to allow overwrite existing type annotations. [#289](https://github.com/Instagram/LibCST/pull/291) ## Fixed - -- Close opened file handles on finishing codemod to avoid `Too many open files` on OSX. [#283](https://github.com/Instagram/LibCST/pull/283) + - Close opened file handles on finishing codemod to avoid `Too many open files` on OSX. [#283](https://github.com/Instagram/LibCST/pull/283) ## Deprecated - -- `ApplyTypeAnnotationsVisitor.add_stub_to_context` is renamed as `ApplyTypeAnnotationsVisitor.store_stub_in_context`. + - `ApplyTypeAnnotationsVisitor.add_stub_to_context` is renamed as `ApplyTypeAnnotationsVisitor.store_stub_in_context`. # 0.3.4 - 2020-03-27 ## Added - -- Supported CST parsing for Python 3.0, 3.1 and 3.3. [#261](https://github.com/Instagram/LibCST/pull/261) -- Added `RemoveUnusedImportsCommand` for removing unused import codemod. [#266](https://github.com/Instagram/LibCST/pull/266) -- Added `ApplyTypeAnnotationsVisitor.add_stub_to_context` for apply type annotations from stub modules. [#265](https://github.com/Instagram/LibCST/pull/265) + - Supported CST parsing for Python 3.0, 3.1 and 3.3. 
[#261](https://github.com/Instagram/LibCST/pull/261) + - Added `RemoveUnusedImportsCommand` for removing unused import codemod. [#266](https://github.com/Instagram/LibCST/pull/266) + - Added `ApplyTypeAnnotationsVisitor.add_stub_to_context` for apply type annotations from stub modules. [#265](https://github.com/Instagram/LibCST/pull/265) ## Updated - -- Improved exception message of `get_metadata` when MetadataWrapper is not used. [#257](https://github.com/Instagram/LibCST/pull/257) -- New steps for Pyre type check in README.rst which analyzes installed Python sources for better type checking. [#262](https://github.com/Instagram/LibCST/pull/262) + - Improved exception message of `get_metadata` when MetadataWrapper is not used. [#257](https://github.com/Instagram/LibCST/pull/257) + - New steps for Pyre type check in README.rst which analyzes installed Python sources for better type checking. [#262](https://github.com/Instagram/LibCST/pull/262) ## Fixed - -- Parsed `except(Exception):` correctly while there is no space after except syntax. [#256](https://github.com/Instagram/LibCST/pull/256) -- Fixed `RemoveImportsVisitor` to not remove imports when references still exist. [#264](https://github.com/Instagram/LibCST/pull/264) -- Fixed missing type annotations. [#271](https://github.com/Instagram/LibCST/pull/271) -- `AddImportsVisitor` generates deterministic order for added imports. [#274](https://github.com/Instagram/LibCST/pull/274) + - Parsed `except(Exception):` correctly while there is no space after except syntax. [#256](https://github.com/Instagram/LibCST/pull/256) + - Fixed `RemoveImportsVisitor` to not remove imports when references still exist. [#264](https://github.com/Instagram/LibCST/pull/264) + - Fixed missing type annotations. [#271](https://github.com/Instagram/LibCST/pull/271) + - `AddImportsVisitor` generates deterministic order for added imports. [#274](https://github.com/Instagram/LibCST/pull/274) # 0.3.3 - 2020-03-05 ## Added - -- `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. -- `get_docstring` helper provides docstring from `Module`, `ClassDef` and `FunctionDef` node types. + - `ByteSpanPositionProvider` provides start offset and length of CSTNode as metadata. + - `get_docstring` helper provides docstring from `Module`, `ClassDef` and `FunctionDef` node types. ## Updated - -- Optimized `ScopeProvider` performance to run faster and use less memory: - - remove unnecessary `Assignment` of keyword `Arg`. - - don't provide scope object for formatting information nodes. - - batch set union updates in `infer_accesses` step. + - Optimized `ScopeProvider` performance to run faster and use less memory: + - remove unnecessary `Assignment` of keyword `Arg`. + - don't provide scope object for formatting information nodes. + - batch set union updates in `infer_accesses` step. ## Fixed - -- Fixed `_assignments` mutation when calling read-only `Scope.get_qualified_names_for` and `__contains__`. + - Fixed `_assignments` mutation when calling read-only `Scope.get_qualified_names_for` and `__contains__`. # 0.3.2 - 2020-02-24 ## Added - -- Added `RemoveImportsVisitor` to remove an import if it's not used in a module. -- Added `GatherExportsVisitor` to gather exports specified in `__all__`. -- Added property helpers `evaluated_name` and `evaluated_name` in `ImportAlias`. -- Added helper to get full module name: `get_absolute_module_for_import` and `get_absolute_module_for_import_or_raise`. 
-- Added `CodemodContext.full_module_name` for full dotted module name. -- Added format specifiers f-string conversion support to `ConvertFormatStringCommand`. + - Added `RemoveImportsVisitor` to remove an import if it's not used in a module. + - Added `GatherExportsVisitor` to gather exports specified in `__all__`. + - Added property helpers `evaluated_name` and `evaluated_name` in `ImportAlias`. + - Added helper to get full module name: `get_absolute_module_for_import` and `get_absolute_module_for_import_or_raise`. + - Added `CodemodContext.full_module_name` for full dotted module name. + - Added format specifiers f-string conversion support to `ConvertFormatStringCommand`. ## Updated - -- Moved LibCST version to `_version.py` and can print it by `python -m libcst.tool --version`. -- Improved `EnsureImportPresentCommand` with `--alias` option. -- Improved `ConvertFormatStringCommand` with `--allow-strip-comments` and `--allow-await` options. + - Moved LibCST version to `_version.py` and can print it by `python -m libcst.tool --version`. + - Improved `EnsureImportPresentCommand` with `--alias` option. + - Improved `ConvertFormatStringCommand` with `--allow-strip-comments` and `--allow-await` options. # 0.3.1 - 2020-02-06 ## Added -- Added helpers to get both the raw and evaluated value of a SimpleString. -- Added helpers to get the quoting and prefix of SimpleString and FormattedString. -- Added a helper to get the evaluated value of number types. -- Added templated parsers for statement/expression/module to make constructing updated nodes in transforms easier. -- FullRepoManager is now integrated into codemods, so metadata requiring full repo analysis can now be used. -- Added `get_full_name_for_node_or_raise` helper to remove boilerplate of checking against `None`. + - Added helpers to get both the raw and evaluated value of a SimpleString. + - Added helpers to get the quoting and prefix of SimpleString and FormattedString. + - Added a helper to get the evaluated value of number types. + - Added templated parsers for statement/expression/module to make constructing updated nodes in transforms easier. + - FullRepoManager is now integrated into codemods, so metadata requiring full repo analysis can now be used. + - Added `get_full_name_for_node_or_raise` helper to remove boilerplate of checking against `None`. ## Updated -- Upgraded Pyre dependency to 0.0.41. -- Added additional status to `libcst codemod` command. -- `get_full_name_for_node` now supports decorators. + - Upgraded Pyre dependency to 0.0.41. + - Added additional status to `libcst codemod` command. + - `get_full_name_for_node` now supports decorators. ## Fixed -- Clarified documentation around f-strings, fixed indentation. -- Fixed `libcst list` crashing if a codemod does unsafe work on import. -- Fixed deploy-time dependencies so pyyaml won't have to be manually installed to execute codemods. -- QualifiedNameProvider no longer erroneously claims names inside attributes are built-ins. + - Clarified documentation around f-strings, fixed indentation. + - Fixed `libcst list` crashing if a codemod does unsafe work on import. + - Fixed deploy-time dependencies so pyyaml won't have to be manually installed to execute codemods. + - QualifiedNameProvider no longer erroneously claims names inside attributes are built-ins. # 0.3.0 - 2020-01-16 ## Added -- Added support for parsing and rendering Python 3.8 source code. -- Added more documentation for codemods. -- Added `get_full_name_for_expression` helper method. 
-- Added `has_name` helper to `QualifiedNameProvider`. -- Added a `--python-version` flag to `libcst.tool print` utility. + - Added support for parsing and rendering Python 3.8 source code. + - Added more documentation for codemods. + - Added `get_full_name_for_expression` helper method. + - Added `has_name` helper to `QualifiedNameProvider`. + - Added a `--python-version` flag to `libcst.tool print` utility. ## Updated -- Codemod command can now discover codemods in subdirectories of configured modules. -- Updgraded Pyre dependency to 0.0.39. + - Codemod command can now discover codemods in subdirectories of configured modules. + - Updgraded Pyre dependency to 0.0.39. ## Fixed -- Cleaned up some typos and formatting issues in comments and documentation. -- Cleaned up a few redundant typevars. -- Fixed callable typing in matchers implementation. -- Fixed incorrect base class references in matcher decorator attribute visitors. -- Fixed codemod test assertion failing for some whitespace edge cases. -- Fixed scope analysis to track variable usage on `del` statements. + - Cleaned up some typos and formatting issues in comments and documentation. + - Cleaned up a few redundant typevars. + - Fixed callable typing in matchers implementation. + - Fixed incorrect base class references in matcher decorator attribute visitors. + - Fixed codemod test assertion failing for some whitespace edge cases. + - Fixed scope analysis to track variable usage on `del` statements. ## Deprecated -- Deprecated exporting `ensure_type` from `libcst` in favor of `libcst.helpers`. + - Deprecated exporting `ensure_type` from `libcst` in favor of `libcst.helpers`. ## Removed -- Removed `ExtSlice` and helper code in favor of `SubscriptElement`. -- Removed `default_params` attribute on `Parameters`. -- Removed `SyntacticPositionProvider` and `BasicPositionProvider`. -- Removed `CodePosition` and `CodeRange` exports on `libcst` in favor of `libcst.metadata`. + - Removed `ExtSlice` and helper code in favor of `SubscriptElement`. + - Removed `default_params` attribute on `Parameters`. + - Removed `SyntacticPositionProvider` and `BasicPositionProvider`. + - Removed `CodePosition` and `CodeRange` exports on `libcst` in favor of `libcst.metadata`. # 0.2.7 - 2020-01-07 ## Updated -- Command-line interface now shows rough estimate of time remaining while executing a codemod. -- Add needed import now supports import aliases. + - Command-line interface now shows rough estimate of time remaining while executing a codemod. + - Add needed import now supports import aliases. # 0.2.6 - 2020-01-01 ## Added -- Added Codemod framework for running code transform over a codebase in parallel. - - Codemod for code transform logic. - - CodemodContext for preserving states across transforms. - - CodemodCommand for CLI interface. - - CodemodTest for testing codemod easily. - - yaml codemod config. - - Pre-build commands in codemod/commands/. -- Added TypeInferenceProvider for inferred type info from Pyre. A regression test suite was included. -- Added FullRepoManager for metadata inter-process cache handing. + - Added Codemod framework for running code transform over a codebase in parallel. + - Codemod for code transform logic. + - CodemodContext for preserving states across transforms. + - CodemodCommand for CLI interface. + - CodemodTest for testing codemod easily. + - yaml codemod config. + - Pre-build commands in codemod/commands/. + - Added TypeInferenceProvider for inferred type info from Pyre. A regression test suite was included. 
+ - Added FullRepoManager for metadata inter-process cache handing. ## Fixed -- Fixed usage link in README. -- Fixed type annotation for Mypy compatibility. + - Fixed usage link in README. + - Fixed type annotation for Mypy compatibility. ## Updated -- Upgraded Pyre to 0.0.38 + - Upgraded Pyre to 0.0.38 # 0.2.5 - 2019-12-05 ## Added -- Added `extract`, `extractall` and `replace` functions to Matchers API. + - Added `extract`, `extractall` and `replace` functions to Matchers API. ## Fixed -- Fixed length restrictions for `AllOf` and `OneOf` so that they can be used with sequence expansion operators. -- Fixed batchable visitors not calling attribute visit functions. -- Fixed typos in docstrings. -- Fixed matcher type exception not being pickleable. + - Fixed length restrictions for `AllOf` and `OneOf` so that they can be used with sequence expansion operators. + - Fixed batchable visitors not calling attribute visit functions. + - Fixed typos in docstrings. + - Fixed matcher type exception not being pickleable. ## Deprecated -- Deprecated parsing function parameters with defaults into `default_params` attribute. They can be found in the `params` attribute instead. + - Deprecated parsing function parameters with defaults into `default_params` attribute. They can be found in the `params` attribute instead. # 0.2.4 - 2019-11-13 ## Fixed -- Fixed broken types for sequence matchers. + - Fixed broken types for sequence matchers. # 0.2.3 - 2019-11-11 ## Added -- Preliminary support for 3.8 walrus operator. -- CI config and fuzz tests for 3.8. -- Experimental re-entrant codegen API. -- Added `unsafe_skip_copy` optimization to `MetadataWrapper`. -- Matchers API now includes a `findall` function. -- Matchers now have a `MatchMetadataIfTrue` special matcher. + - Preliminary support for 3.8 walrus operator. + - CI config and fuzz tests for 3.8. + - Experimental re-entrant codegen API. + - Added `unsafe_skip_copy` optimization to `MetadataWrapper`. + - Matchers API now includes a `findall` function. + - Matchers now have a `MatchMetadataIfTrue` special matcher. ## Updated -- Updated to latest Black release. -- Better type documentation for generated matchers. + - Updated to latest Black release. + - Better type documentation for generated matchers. ## Fixed -- Clarified matchers documentation in several confusing areas. -- Drastically sped up codegen and tests. -- `QualifiedName` now supports imported attributtes. -- `ExpressionContext` properly marks loop variables as `STORE`. -- Various typos in documentation are fixed. + - Clarified matchers documentation in several confusing areas. + - Drastically sped up codegen and tests. + - `QualifiedName` now supports imported attributtes. + - `ExpressionContext` properly marks loop variables as `STORE`. + - Various typos in documentation are fixed. ## Deprecated -- Deprecated `BasicPositionProvider` and `SyntacticPositionProvider` in favor of `WhitespaceInclusivePositionProvider` and `PositionProvider`. + - Deprecated `BasicPositionProvider` and `SyntacticPositionProvider` in favor of `WhitespaceInclusivePositionProvider` and `PositionProvider`. # 0.2.2 - 2019-10-24 ## Added -- Added `deep_with_changes` helper method on CSTNode. -- Added metadata support to matchers. -- Added ability to get the defining node from a `LocalScope` (`FunctionScope`, `ClassScope` or `ComprehensionScope`). + - Added `deep_with_changes` helper method on CSTNode. + - Added metadata support to matchers. 
+ - Added ability to get the defining node from a `LocalScope` (`FunctionScope`, `ClassScope` or `ComprehensionScope`). ## Updated -- Many changes to LibCST documentation including a new best practices page and updated scope tutorial. -- Exported `CodePosition` and `CodeRange` from `libcst.metadata` instead of `libcst`. + - Many changes to LibCST documentation including a new best practices page and updated scope tutorial. + - Exported `CodePosition` and `CodeRange` from `libcst.metadata` instead of `libcst`. ## Fixed -- Disallowed decorating a concrete visit or leave method with `@visit` or `@leave` decorators. -- Renamed position provider classes to be more self-explanatory. -- Fixed trailing newline detection when the last character in a file was from a continuation. -- Fixed `deep_clone` potentially blowing the stack with large LibCST trees. + - Disallowed decorating a concrete visit or leave method with `@visit` or `@leave` decorators. + - Renamed position provider classes to be more self-explanatory. + - Fixed trailing newline detection when the last character in a file was from a continuation. + - Fixed `deep_clone` potentially blowing the stack with large LibCST trees. ## Deprecated -- Deprecated `ExtSlice` in favor of `SubscriptElement`. -- Deprecated parsing `Subscript` slices directly into `Index` or `Slice` nodes. + - Deprecated `ExtSlice` in favor of `SubscriptElement`. + - Deprecated parsing `Subscript` slices directly into `Index` or `Slice` nodes. # 0.2.1 - 2019-10-14 ## Added -- `Scope.assignments` and `Scope.accesses` APIs to access all references in a scope. -- Scope analysis tutorial. + - `Scope.assignments` and `Scope.accesses` APIs to access all references in a scope. + - Scope analysis tutorial. ## Updated -- Supported `` in `Scope.get_qualified_names_for` and `QualifiedName`. -- Enforced identity equality for matchers and immutability of non-dataclass matchers. -- Generalize codegen cleanup steps for all codegen. + - Supported `` in `Scope.get_qualified_names_for` and `QualifiedName`. + - Enforced identity equality for matchers and immutability of non-dataclass matchers. + - Generalize codegen cleanup steps for all codegen. ## Fixed - -- Made `BatchableMetadataProvider` typing covariant over its typevar. -- Fixed LICENSE header on generated matcher file. -- Cleanup unused internal noqa and on-call specification. + - Made `BatchableMetadataProvider` typing covariant over its typevar. + - Fixed LICENSE header on generated matcher file. + - Cleanup unused internal noqa and on-call specification. # 0.2.0 - 2019-10-04 ## Added -- Added matchers which allow comparing LibCST trees against arbitrary patterns. -- Improved tree manipulation with `deep_remove` and `deep_replace` helper methods on CSTNode. -- Added new metadata providers: parent node and qualified name. + - Added matchers which allow comparing LibCST trees against arbitrary patterns. + - Improved tree manipulation with `deep_remove` and `deep_replace` helper methods on CSTNode. + - Added new metadata providers: parent node and qualified name. ## Updated -- Updated Pyre to latest release. -- Updated scope metadata to provide additional helpers. -- Updated preferred method of removing a node from its parent in a visitor. + - Updated Pyre to latest release. + - Updated scope metadata to provide additional helpers. + - Updated preferred method of removing a node from its parent in a visitor. ## Fixed -- Metadata classes and methods are now exported from "libcst.metadata" instead of several submodules. 
-- Fixed LICENSE file to explicitly reference individual files in the repo with different licenses. -- Fixed `deep_clone` to correctly clone leaf nodes. -- Fixed all parse entrypoints to always return a tree with no duplicated leaf nodes. + - Metadata classes and methods are now exported from "libcst.metadata" instead of several submodules. + - Fixed LICENSE file to explicitly reference individual files in the repo with different licenses. + - Fixed `deep_clone` to correctly clone leaf nodes. + - Fixed all parse entrypoints to always return a tree with no duplicated leaf nodes. # 0.1.3 - 2019-09-18 ## Added -- Added preliminary support for parsing Python 3.5 and Python 3.6 source. -- Added scope analysis metadata provider. -- Added mypy type support for built package. + - Added preliminary support for parsing Python 3.5 and Python 3.6 source. + - Added scope analysis metadata provider. + - Added mypy type support for built package. ## Fixed -- Several typos in documentation are fixed. + - Several typos in documentation are fixed. # 0.1.2 - 2019-08-29 ## Added -- Added attribute visitor hooks. -- Added base visit/leave methods which can be subclassed. -- Hypothesis fuzz testing suite, courtesy of Zac Hatfield-Dodds. + - Added attribute visitor hooks. + - Added base visit/leave methods which can be subclassed. + - Hypothesis fuzz testing suite, courtesy of Zac Hatfield-Dodds. ## Fixed -- Metadata documentation is much more complete. -- Fixed several whitespace validation issues caught by Hypothesis. -- Parser syntax errors are now used inside parser. + - Metadata documentation is much more complete. + - Fixed several whitespace validation issues caught by Hypothesis. + - Parser syntax errors are now used inside parser. # 0.1.1 - 2019-08-20 ## Added -- Metadata interface is now exported. +- Metadata interface is now exported. ## Fixed -- Dependencies are now specified with minimum revisions. -- Lots of documentation fixes. +- Dependencies are now specified with minimum revisions. +- Lots of documentation fixes. # 0.1 - 2019-07-23 ## Added -- First public release of LibCST. -- Complete, fully typed syntax tree for Python 3.6. -- Full suite of tests for each defined node type. + - First public release of LibCST. + - Complete, fully typed syntax tree for Python 3.6. + - Full suite of tests for each defined node type. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 83f431e8..0f7ad8bf 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,80 +1,5 @@ # Code of Conduct -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to make participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, sex characteristics, gender identity and expression, -level of experience, education, socio-economic status, nationality, personal -appearance, race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or -advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic -address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a -professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies within all project spaces, and it also applies when -an individual is representing the project or its community in public spaces. -Examples of representing a project or community include using an official -project e-mail address, posting via an official social media account, or acting -as an appointed representative at an online or offline event. Representation of -a project may be further defined and clarified by project maintainers. - -This Code of Conduct also applies outside the project spaces when there is a -reasonable belief that an individual's behavior may have a negative impact on -the project or its community. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at . All -complaints will be reviewed and investigated and will result in a response that -is deemed necessary and appropriate to the circumstances. The project team is -obligated to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq +Facebook has adopted a Code of Conduct that we expect project participants to adhere to. +Please read the [full text](https://code.fb.com/codeofconduct/) +so that you can understand what actions will and will not be tolerated. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2e35431d..9af09650 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,32 +9,12 @@ pull requests. ## Pull Requests We actively welcome your pull requests. 
-### Setup Your Environment - -1. Install a [Rust toolchain](https://rustup.rs) and [uv](https://docs.astral.sh/uv/) -2. Fork the repo on your side -3. Clone the repo - > git clone [your fork.git] libcst - > cd libcst -4. Sync with the main libcst version package - > git fetch --tags https://github.com/instagram/libcst -5. Setup the env - > uv sync - -You are now ready to create your own branch from main, and contribute. -Please provide tests (using unittest), and update the documentation (both docstrings -and sphinx doc), if applicable. - -### Before Submitting Your Pull Request - -1. Format your code - > uv run poe format -2. Run the type checker - > uv run poe typecheck -3. Test your changes - > uv run poe test -4. Check linters - > uv run poe lint +1. Fork the repo and create your branch from `main`. +2. If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes by `python -m unittest`. +5. Make sure your code lints. +6. If you haven't already, complete the Contributor License Agreement ("CLA"). ## Contributor License Agreement ("CLA") In order to accept your pull request, we need you to submit a CLA. You only need diff --git a/LICENSE b/LICENSE index 5594616f..ad52f20b 100644 --- a/LICENSE +++ b/LICENSE @@ -13,8 +13,8 @@ PSF). These files are: - libcst/_parser/parso/tests/test_fstring.py - libcst/_parser/parso/tests/test_tokenize.py - libcst/_parser/parso/tests/test_utils.py -- native/libcst/src/tokenizer/core/mod.rs -- native/libcst/src/tokenizer/core/string_types.rs +- libcst_native/src/tokenize/core/mod.rs +- libcst_native/src/tokenize/core/string_types.rs Some Python files have been taken from dataclasses and are therefore Apache licensed. Modifications on these files are licensed under Apache 2.0 license. diff --git a/MAINTAINERS.md b/MAINTAINERS.md deleted file mode 100644 index a7d79006..00000000 --- a/MAINTAINERS.md +++ /dev/null @@ -1,12 +0,0 @@ -# How to make a new release - -1. Add a new entry to `CHANGELOG.md` (I normally use the [new release page](https://github.com/Instagram/LibCST/releases/new) to generate a changelog, then manually group) - 1. Follow the existing format: `Fixed`, `Added`, `Updated`, `Deprecated`, `Removed`, `New Contributors` sections, and the full changelog link at the bottom. - 1. Mention only user-visible changes - improvements to CI, tests, or development workflow aren't noteworthy enough - 1. Version bumps are generally not worth mentioning with some notable exceptions (like pyo3) - 1. Group related PRs into one bullet point if it makes sense -2. manually bump versions in `Cargo.toml` files in the repo -3. run `cargo update -p libcst` -4. make a new PR with the above changes, get it reviewed and landed -5. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there -6. 
after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst` diff --git a/MANIFEST.in b/MANIFEST.in index 8fd03bd0..0f3912c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,4 @@ -include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md docs/source/*.rst libcst/py.typed +include README.rst LICENSE CODE_OF_CONDUCT.md CONTRIBUTING.md requirements.txt requirements-dev.txt docs/source/*.rst libcst/py.typed include native/Cargo.toml recursive-include native * -recursive-exclude native/target * \ No newline at end of file diff --git a/README.rst b/README.rst index aaff5f41..9f374c4d 100644 --- a/README.rst +++ b/README.rst @@ -4,13 +4,9 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python -|support-ukraine| |readthedocs-badge| |ci-badge| |pypi-badge| |pypi-download| |notebook-badge| |types-badge| +|readthedocs-badge| |ci-badge| |codecov-badge| |pypi-badge| |pypi-download| |notebook-badge| -.. |support-ukraine| image:: https://img.shields.io/badge/Support-Ukraine-FFD500?style=flat&labelColor=005BBB - :alt: Support Ukraine - Help Provide Humanitarian Aid to Ukraine. - :target: https://opensource.fb.com/support-ukraine - -.. |readthedocs-badge| image:: https://readthedocs.org/projects/libcst/badge/?version=latest&style=flat +.. |readthedocs-badge| image:: https://readthedocs.org/projects/pip/badge/?version=latest&style=flat :target: https://libcst.readthedocs.io/en/latest/ :alt: Documentation @@ -18,6 +14,10 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :target: https://github.com/Instagram/LibCST/actions/workflows/build.yml?query=branch%3Amain :alt: Github Actions +.. |codecov-badge| image:: https://codecov.io/gh/Instagram/LibCST/branch/main/graph/badge.svg + :target: https://codecov.io/gh/Instagram/LibCST/branch/main + :alt: CodeCov + .. |pypi-badge| image:: https://img.shields.io/pypi/v/libcst.svg :target: https://pypi.org/project/libcst :alt: PYPI @@ -31,13 +31,9 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python :target: https://mybinder.org/v2/gh/Instagram/LibCST/main?filepath=docs%2Fsource%2Ftutorial.ipynb :alt: Notebook -.. |types-badge| image:: https://img.shields.io/pypi/types/libcst - :target: https://pypi.org/project/libcst - :alt: PYPI - Types - .. intro-start -LibCST parses Python 3.0 -> 3.14 source code as a CST tree that keeps +LibCST parses Python 3.0, 3.1, 3.3, 3.5, 3.6, 3.7 or 3.8 source code as a CST tree that keeps all formatting details (comments, whitespaces, parentheses, etc). It's useful for building automated refactoring (codemod) applications and linters. @@ -62,9 +58,7 @@ Example expression:: 1 + 2 -CST representation: - -.. code-block:: python +CST representation:: BinaryOperation( left=Integer( @@ -127,7 +121,7 @@ For a more detailed usage example, `see our documentation Installation ------------ -LibCST requires Python 3.9+ and can be easily installed using most common Python +LibCST requires Python 3.6+ and can be easily installed using most common Python packaging tools. We recommend installing the latest stable release from `PyPI `_ with pip: @@ -135,11 +129,6 @@ packaging tools. We recommend installing the latest stable release from pip install libcst -For parsing, LibCST ships with a native extension, so releases are distributed as binary -wheels as well as the source code. 
If a binary wheel is not available for your system -(Linux/Windows x86/x64 and Mac x64/arm are covered), you'll need a recent -`Rust toolchain `_ for installing. - Further Reading --------------- - `Static Analysis at Scale: An Instagram Story. `_ @@ -148,8 +137,42 @@ Further Reading Development ----------- -See `CONTRIBUTING.md `_ for more details. +Start by setting up and activating a virtualenv: +.. code-block:: shell + + git clone git@github.com:Instagram/LibCST.git libcst + cd libcst + python3 -m venv ../libcst-env/ # just an example, put this wherever you want + source ../libcst-env/bin/activate + pip install --upgrade pip # optional, if you have an old system version of pip + pip install -r requirements.txt -r requirements-dev.txt + # If you're done with the virtualenv, you can leave it by running: + deactivate + +We use `ufmt `_ to format code. To format +changes to be conformant, run the following in the root: + +.. code-block:: shell + + ufmt format && python -m fixit.cli.apply_fix + +To run all tests, you'll need to do the following in the root: + +.. code-block:: shell + + python -m unittest + +You can also run individual tests by using unittest and specifying a module like +this: + +.. code-block:: shell + + python -m unittest libcst.tests.test_batched_visitor + +See the `unittest documentation `_ +for more examples of how to run tests. + Building ~~~~~~~~ @@ -166,11 +189,13 @@ directory: cargo build -The ``libcst.native`` module should be rebuilt automatically, but to force it: +To build the ``libcst.native`` module and install ``libcst``, run this +from the root: .. code-block:: shell - uv sync --reinstall-package libcst + pip uninstall -y libcst + pip install -e . Type Checking ~~~~~~~~~~~~~ @@ -181,7 +206,10 @@ To verify types for the library, do the following in the root: .. code-block:: shell - uv run poe typecheck + pyre check + +*Note:* You may need to run the ``pip install -e .`` command prior +to type checking, see the section above on building. Generating Documents ~~~~~~~~~~~~~~~~~~~~ @@ -190,7 +218,7 @@ To generate documents, do the following in the root: .. code-block:: shell - uv run --group docs poe docs + sphinx-build docs/source/ docs/build/ Future ====== diff --git a/apt.txt b/apt.txt deleted file mode 100644 index a3e85e90..00000000 --- a/apt.txt +++ /dev/null @@ -1,2 +0,0 @@ -rustc -cargo \ No newline at end of file diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..32f5ee69 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,4 @@ +coverage: + status: + project: no + patch: yes diff --git a/docs/source/codemods_tutorial.rst b/docs/source/codemods_tutorial.rst index 6f657fbc..78365ca9 100644 --- a/docs/source/codemods_tutorial.rst +++ b/docs/source/codemods_tutorial.rst @@ -26,7 +26,7 @@ then edit the produced ``.libcst.codemod.yaml`` file:: python3 -m libcst.tool initialize . The file includes provisions for customizing any generated code marker, calling an -external code formatter such as `black `_, blacklisting +external code formatter such as `black `_, blackisting patterns of files you never wish to touch and a list of modules that contain valid codemods that can be executed. If you want to write and run codemods specific to your repository or organization, you can add an in-repo module location to the list of @@ -135,18 +135,16 @@ replaces any string which matches our string command-line argument with a consta It also takes care of adding the import required for the constant to be defined properly. Cool! 
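The constant-folding codemod described in this tutorial hunk is easier to follow next to a concrete skeleton. The following is only a minimal sketch of such a command: the ``ConvertConstantCommand`` name and the ``--string``/``--constant`` flags come from the tutorial text itself, while the class body and the ``constants`` module it imports from are illustrative placeholders, not the tutorial's actual implementation.

.. code-block:: python

    import argparse

    import libcst as cst
    from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand
    from libcst.codemod.visitors import AddImportsVisitor


    class ConvertConstantCommand(VisitorBasedCodemodCommand):
        DESCRIPTION = "Replace a matching string literal with a named constant."

        @staticmethod
        def add_args(arg_parser: argparse.ArgumentParser) -> None:
            # Exposed as --string/--constant on the libcst.tool codemod CLI.
            arg_parser.add_argument("--string", required=True)
            arg_parser.add_argument("--constant", required=True)

        def __init__(self, context: CodemodContext, string: str, constant: str) -> None:
            super().__init__(context)
            self.string = string
            self.constant = constant

        def leave_SimpleString(
            self, original_node: cst.SimpleString, updated_node: cst.SimpleString
        ) -> cst.BaseExpression:
            if updated_node.evaluated_value == self.string:
                # Make sure the (placeholder) module holding the constant is imported.
                AddImportsVisitor.add_needed_import(self.context, "constants", self.constant)
                return cst.parse_expression(f"constants.{self.constant}")
            return updated_node

Saved as ``constant_folding.py``, a command along these lines can then be exercised with the ``libcst.tool codemod`` invocation shown next.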
Let's look at the command-line help for this codemod. Let's assume you saved it -as ``constant_folding.py``. You can get help for the +as ``constant_folding.py`` inside ``libcst.codemod.commands``. You can get help for the codemod by running the following command:: - python3 -m libcst.tool codemod -x constant_folding.ConvertConstantCommand --help + python3 -m libcst.tool codemod constant_folding.ConvertConstantCommand --help Notice that along with the default arguments, the ``--string`` and ``--constant`` arguments are present in the help, and the command-line description has been updated with the codemod's description string. You'll notice that the codemod also shows up on ``libcst.tool list``. -And ``-x`` flag allows to load any module as a codemod in addition to the standard ones. - ---------------- Testing Codemods ---------------- diff --git a/docs/source/conf.py b/docs/source/conf.py index d3311e90..2f20dcab 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -71,7 +71,7 @@ master_doc = "index" # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = "en" +language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -196,7 +196,6 @@ intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True - # -- autodoc customization def strip_class_signature(app, what, name, obj, options, signature, return_annotation): if what == "class": @@ -219,7 +218,7 @@ def setup(app): nbsphinx_prolog = r""" -{% set docname = 'docs/source/' + env.doc2path(env.docname, base=None)|string%} +{% set docname = 'docs/source/' + env.doc2path(env.docname, base=None) %} .. only:: html diff --git a/docs/source/helpers.rst b/docs/source/helpers.rst index 3cf5abfb..e4b94d2b 100644 --- a/docs/source/helpers.rst +++ b/docs/source/helpers.rst @@ -32,18 +32,3 @@ Functions that assist in traversing an existing LibCST tree. .. autofunction:: libcst.helpers.get_full_name_for_node .. autofunction:: libcst.helpers.get_full_name_for_node_or_raise .. autofunction:: libcst.helpers.ensure_type - -Node fields filtering Helpers ------------------------------ - -Function that assist when handling CST nodes' fields. - -.. autofunction:: libcst.helpers.filter_node_fields - -And lower level functions: - -.. autofunction:: libcst.helpers.get_node_fields -.. autofunction:: libcst.helpers.is_whitespace_node_field -.. autofunction:: libcst.helpers.is_syntax_node_field -.. autofunction:: libcst.helpers.is_default_node_field -.. autofunction:: libcst.helpers.get_field_default_value diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 9e450c97..bdda7a21 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -18,10 +18,10 @@ numbers of nodes through the :class:`~libcst.metadata.PositionProvider`: .. 
code-block:: python class NamePrinter(cst.CSTVisitor): - METADATA_DEPENDENCIES = (cst.metadata.PositionProvider,) + METADATA_DEPENDENCIES = (cst.PositionProvider,) def visit_Name(self, node: cst.Name) -> None: - pos = self.get_metadata(cst.metadata.PositionProvider, node).start + pos = self.get_metadata(cst.PositionProvider, node).start print(f"{node.value} found at line {pos.line}, column {pos.column}") wrapper = cst.metadata.MetadataWrapper(cst.parse_module("x = 1")) @@ -94,7 +94,7 @@ declaring one of :class:`~libcst.metadata.PositionProvider` or most cases, :class:`~libcst.metadata.PositionProvider` is what you probably want. -Node positions are represented with :class:`~libcst.metadata.CodeRange` +Node positions are is represented with :class:`~libcst.metadata.CodeRange` objects. See :ref:`the above example`. .. autoclass:: libcst.metadata.PositionProvider @@ -134,7 +134,7 @@ New scopes are created for classes, functions, and comprehensions. Other block constructs like conditional statements, loops, and try…except don't create their own scope. -There are five different types of scopes in Python: +There are five different type of scope in Python: :class:`~libcst.metadata.BuiltinScope`, :class:`~libcst.metadata.GlobalScope`, :class:`~libcst.metadata.ClassScope`, @@ -226,14 +226,6 @@ We provide :class:`~libcst.metadata.ParentNodeProvider` for those use cases. .. autoclass:: libcst.metadata.ParentNodeProvider :no-undoc-members: -File Path Metadata ------------------- -This provides the absolute file path on disk for any module being visited. -Requires an active :class:`~libcst.metadata.FullRepoManager` when using this provider. - -.. autoclass:: libcst.metadata.FilePathProvider - :no-undoc-members: - Type Inference Metadata ----------------------- `Type inference `__ is to automatically infer @@ -242,8 +234,8 @@ In Python, type checkers like `Mypy `_ or `Pyre `__ analyze `type annotations `__ and infer types for expressions. :class:`~libcst.metadata.TypeInferenceProvider` is provided by `Pyre Query API `__ -which requires `setup watchman `_ for incremental typechecking. -:class:`~libcst.metadata.FullRepoManager` is built for manage the inter process communication to Pyre. +which requires `setup watchman `_ for incremental typechecking. +:class:`~libcst.metadata.FullRepoManger` is built for manage the inter process communication to Pyre. .. autoclass:: libcst.metadata.TypeInferenceProvider :no-undoc-members: diff --git a/docs/source/scope_tutorial.ipynb b/docs/source/scope_tutorial.ipynb index 179e2ed7..e4d4393d 100644 --- a/docs/source/scope_tutorial.ipynb +++ b/docs/source/scope_tutorial.ipynb @@ -90,7 +90,7 @@ "source": [ "Warn on unused imports and undefined references\n", "===============================================\n", - "To find all unused imports, we iterate through :attr:`~libcst.metadata.Scope.assignments` and an assignment is unused when its :attr:`~libcst.metadata.BaseAssignment.references` is empty. To find all undefined references, we iterate through :attr:`~libcst.metadata.Scope.accesses` (we focus on :class:`~libcst.Import`/:class:`~libcst.ImportFrom` assignments) and an access is undefined reference when its :attr:`~libcst.metadata.Access.referents` is empty. When reporting the warning to the developer, we'll want to report the line number and column offset along with the suggestion to make it more clear. 
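Concretely, the unused-import check described here can be condensed into something like the following sketch. It is illustrative rather than the notebook's actual cell, and the ``source`` string is made up for the example; everything else uses the ``ScopeProvider``/``PositionProvider`` APIs referenced above.

.. code-block:: python

    import libcst as cst
    from libcst.metadata import Assignment, MetadataWrapper, PositionProvider, ScopeProvider

    source = "import foo\nimport bar\n\nfoo.baz()\n"
    wrapper = MetadataWrapper(cst.parse_module(source))
    scopes = {s for s in wrapper.resolve(ScopeProvider).values() if s is not None}
    positions = wrapper.resolve(PositionProvider)

    for scope in scopes:
        for assignment in scope.assignments:
            node = assignment.node
            if isinstance(assignment, Assignment) and isinstance(node, (cst.Import, cst.ImportFrom)):
                if len(assignment.references) == 0:
                    # No accesses recorded for this import: report it with its position.
                    start = positions[node].start
                    print(f"`{assignment.name}` imported but unused at line {start.line}, column {start.column}")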
We can get position information from :class:`~libcst.metadata.PositionProvider` and print the warnings as follows.\n" + "To find all unused imports, we iterate through :attr:`~libcst.metadata.Scope.assignments` and an assignment is unused when its :attr:`~libcst.metadata.BaseAssignment.references` is empty. To find all undefined references, we iterate through :attr:`~libcst.metadata.Scope.accesses` (we focus on :class:`~libcst.Import`/:class:`~libcst.ImportFrom` assignments) and an access is undefined reference when its :attr:`~libcst.metadata.Access.referents` is empty. When reporting the warning to developer, we'll want to report the line number and column offset along with the suggestion to make it more clear. We can get position information from :class:`~libcst.metadata.PositionProvider` and print the warnings as follows.\n" ] }, { @@ -136,13 +136,13 @@ "Automatically Remove Unused Import\n", "==================================\n", "Unused import is a commmon code suggestion provided by lint tool like `flake8 F401 `_ ``imported but unused``.\n", - "Even though reporting unused imports is already useful, with LibCST we can provide an automatic fix to remove unused imports. That can make the suggestion more actionable and save developer's time.\n", + "Even though reporting unused import is already useful, with LibCST we can provide automatic fix to remove unused import. That can make the suggestion more actionable and save developer's time.\n", "\n", "An import statement may import multiple names, we want to remove those unused names from the import statement. If all the names in the import statement are not used, we remove the entire import.\n", "To remove the unused name, we implement ``RemoveUnusedImportTransformer`` by subclassing :class:`~libcst.CSTTransformer`. We overwrite ``leave_Import`` and ``leave_ImportFrom`` to modify the import statements.\n", - "When we find the import node in the lookup table, we iterate through all ``names`` and keep used names in ``names_to_keep``.\n", + "When we find the import node in lookup table, we iterate through all ``names`` and keep used names in ``names_to_keep``.\n", "If ``names_to_keep`` is empty, all names are unused and we remove the entire import node.\n", - "Otherwise, we update the import node and just remove partial names." + "Otherwise, we update the import node and just removing partial names." ] }, { @@ -195,7 +195,7 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ - "After the transform, we use ``.code`` to generate the fixed code and all unused names are fixed as expected! The difflib is used to show only the changed part and only imported lines are updated as expected." + "After the transform, we use ``.code`` to generate fixed code and all unused names are fixed as expected! The difflib is used to show only changed part and only import lines are updated as expected." ] }, { diff --git a/docs/source/tutorial.ipynb b/docs/source/tutorial.ipynb index 1b1ad00d..a7c3cd6b 100644 --- a/docs/source/tutorial.ipynb +++ b/docs/source/tutorial.ipynb @@ -1,25 +1,24 @@ { "cells": [ { + "cell_type": "raw", "metadata": { "raw_mimetype": "text/restructuredtext" }, - "cell_type": "raw", "source": [ "====================\n", "Parsing and Visiting\n", "====================\n", "\n", - "LibCST provides helpers to parse source code string as a concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use the visitor pattern to traverse the tree. 
In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n", + "LibCST provides helpers to parse source code string as concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use visitor pattern to traverse the tree. In this tutorial, we demonstrate a common three-step-workflow to build an automated refactoring (codemod) application:\n", "\n", "1. `Parse Source Code <#Parse-Source-Code>`_\n", - "2. `Display The Source Code CST <#Display-Source-Code-CST>`_\n", - "3. `Build Visitor or Transformer <#Build-Visitor-or-Transformer>`_\n", - "4. `Generate Source Code <#Generate-Source-Code>`_\n", + "2. `Build Visitor or Transformer <#Build-Visitor-or-Transformer>`_\n", + "3. `Generate Source Code <#Generate-Source-Code>`_\n", "\n", "Parse Source Code\n", "=================\n", - "LibCST provides various helpers to parse source code as a concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail)." + "LibCST provides various helpers to parse source code as concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing ` for more detail). The default :class:`~libcst.CSTNode` repr provides pretty print formatting for reading the tree easily." ] }, { @@ -42,42 +41,7 @@ "source": [ "import libcst as cst\n", "\n", - "source_tree = cst.parse_expression(\"1 + 2\")" - ] - }, - { - "metadata": { - "raw_mimetype": "text/restructuredtext" - }, - "cell_type": "raw", - "source": [ - "|\n", - "Display Source Code CST\n", - "=======================\n", - "The default :class:`~libcst.CSTNode` repr provides pretty print formatting for displaying the entire CST tree." - ] - }, - { - "metadata": {}, - "cell_type": "code", - "outputs": [], - "execution_count": null, - "source": "print(source_tree)" - }, - { - "metadata": {}, - "cell_type": "raw", - "source": "The entire CST tree may be overwhelming at times. To only focus on essential elements of the CST tree, LibCST provides the ``dump`` helper." - }, - { - "metadata": {}, - "cell_type": "code", - "outputs": [], - "execution_count": null, - "source": [ - "from libcst.display import dump\n", - "\n", - "print(dump(source_tree))" + "cst.parse_expression(\"1 + 2\")" ] }, { @@ -86,11 +50,9 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ - " \n", - "|\n", "Example: add typing annotation from pyi stub file to Python source\n", "------------------------------------------------------------------\n", - "Python `typing annotation `_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easily using LibCST. The first step is to parse the pyi stub and source files as trees." + "Python `typing annotation `_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easliy using LibCST. 
The first step is to parse the pyi stub and source files as trees." ] }, { @@ -106,7 +68,7 @@ " self._replace(type=self.type.name))\n", "\n", "def tokenize(code, version_info, start_pos=(1, 0)):\n", - " \"\"\"Generate tokens from the source code (string).\"\"\"\n", + " \"\"\"Generate tokens from a the source code (string).\"\"\"\n", " lines = split_lines(code, keepends=True)\n", " return tokenize_lines(lines, version_info, start_pos=start_pos)\n", "'''\n", @@ -130,11 +92,10 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ - "|\n", "Build Visitor or Transformer\n", "============================\n", "For traversing and modifying the tree, LibCST provides Visitor and Transformer classes similar to the `ast module `_. To implement a visitor (read only) or transformer (read/write), simply implement a subclass of :class:`~libcst.CSTVisitor` or :class:`~libcst.CSTTransformer` (see :doc:`Visitors ` for more detail).\n", - "In the typing example, we need to implement a visitor to collect typing annotations from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations." + "In the typing example, we need to implement a visitor to collect typing annotation from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations." ] }, { @@ -223,10 +184,9 @@ "raw_mimetype": "text/restructuredtext" }, "source": [ - "|\n", "Generate Source Code\n", "====================\n", - "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformat the code to keep a consistent coding style." + "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt `_ to reformate the code to keep a consistent coding style." ] }, { diff --git a/libcst/__init__.py b/libcst/__init__.py index 0cd54d62..ff63033d 100644 --- a/libcst/__init__.py +++ b/libcst/__init__.py @@ -4,7 +4,7 @@ # LICENSE file in the root directory of this source tree. 
from libcst._batched_visitor import BatchableCSTVisitor, visit_batched -from libcst._exceptions import CSTLogicError, MetadataException, ParserSyntaxError +from libcst._exceptions import MetadataException, ParserSyntaxError from libcst._flatten_sentinel import FlattenSentinel from libcst._maybe_sentinel import MaybeSentinel from libcst._metadata_dependent import MetadataDependent @@ -29,7 +29,6 @@ from libcst._nodes.expression import ( BaseSimpleComp, BaseSlice, BaseString, - BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -76,9 +75,6 @@ from libcst._nodes.expression import ( StarredElement, Subscript, SubscriptElement, - TemplatedString, - TemplatedStringExpression, - TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -187,7 +183,6 @@ from libcst._nodes.statement import ( MatchValue, NameItem, Nonlocal, - ParamSpec, Pass, Raise, Return, @@ -195,11 +190,6 @@ from libcst._nodes.statement import ( SimpleStatementSuite, Try, TryStar, - TypeAlias, - TypeParam, - TypeParameters, - TypeVar, - TypeVarTuple, While, With, WithItem, @@ -246,7 +236,6 @@ __all__ = [ "CSTVisitorT", "FlattenSentinel", "MaybeSentinel", - "CSTLogicError", "MetadataException", "ParserSyntaxError", "PartialParserConfig", @@ -272,7 +261,6 @@ __all__ = [ "BaseElement", "BaseExpression", "BaseFormattedStringContent", - "BaseTemplatedStringContent", "BaseList", "BaseNumber", "BaseSet", @@ -296,9 +284,6 @@ __all__ = [ "FormattedString", "FormattedStringExpression", "FormattedStringText", - "TemplatedString", - "TemplatedStringText", - "TemplatedStringExpression", "From", "GeneratorExp", "IfExp", @@ -453,10 +438,4 @@ __all__ = [ "VisitorMetadataProvider", "MetadataDependent", "MetadataWrapper", - "TypeVar", - "TypeVarTuple", - "ParamSpec", - "TypeParam", - "TypeParameters", - "TypeAlias", ] diff --git a/libcst/_add_slots.py b/libcst/_add_slots.py index 7012ce1a..6e9c0041 100644 --- a/libcst/_add_slots.py +++ b/libcst/_add_slots.py @@ -1,10 +1,8 @@ # This file is derived from github.com/ericvsmith/dataclasses, and is Apache 2 licensed. # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f188f452/LICENSE.txt # https://github.com/ericvsmith/dataclasses/blob/ae712dd993420d43444f/dataclass_tools.py -# Changed: takes slots in base classes into account when creating slots import dataclasses -from itertools import chain, filterfalse from typing import Any, Mapping, Type, TypeVar _T = TypeVar("_T") @@ -21,14 +19,7 @@ def add_slots(cls: Type[_T]) -> Type[_T]: # Create a new dict for our new class. cls_dict = dict(cls.__dict__) field_names = tuple(f.name for f in dataclasses.fields(cls)) - inherited_slots = set( - chain.from_iterable( - superclass.__dict__.get("__slots__", ()) for superclass in cls.mro() - ) - ) - cls_dict["__slots__"] = tuple( - filterfalse(inherited_slots.__contains__, field_names) - ) + cls_dict["__slots__"] = field_names for field_name in field_names: # Remove our attributes, if present. They'll still be # available in _MARKER. @@ -38,10 +29,19 @@ def add_slots(cls: Type[_T]) -> Type[_T]: # Create the class. qualname = getattr(cls, "__qualname__", None) - - # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. - # pyre-fixme[19]: Expected 0 positional arguments. - cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) + try: + # GenericMeta in py3.6 requires us to track __orig_bases__. This is fixed in py3.7 + # by the removal of GenericMeta. We should just be able to use cls.__bases__ in the + # future. 
+ bases = getattr(cls, "__orig_bases__", cls.__bases__) + # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. + # pyre-fixme[19]: Expected 0 positional arguments. + cls = type(cls)(cls.__name__, bases, cls_dict) + except TypeError: + # We're in py3.7 and should use cls.__bases__ + # pyre-fixme[9]: cls has type `Type[Variable[_T]]`; used as `_T`. + # pyre-fixme[19]: Expected 0 positional arguments. + cls = type(cls)(cls.__name__, cls.__bases__, cls_dict) if qualname is not None: cls.__qualname__ = qualname @@ -50,14 +50,12 @@ def add_slots(cls: Type[_T]) -> Type[_T]: def __getstate__(self: object) -> Mapping[str, Any]: return { - field.name: getattr(self, field.name) - for field in dataclasses.fields(self) - if hasattr(self, field.name) + slot: getattr(self, slot) for slot in self.__slots__ if hasattr(self, slot) } def __setstate__(self: object, state: Mapping[str, Any]) -> None: - for fieldname, value in state.items(): - object.__setattr__(self, fieldname, value) + for slot, value in state.items(): + object.__setattr__(self, slot, value) cls.__getstate__ = __getstate__ cls.__setstate__ = __setstate__ diff --git a/libcst/_exceptions.py b/libcst/_exceptions.py index 4d3dd386..0ba689c2 100644 --- a/libcst/_exceptions.py +++ b/libcst/_exceptions.py @@ -4,11 +4,18 @@ # LICENSE file in the root directory of this source tree. from enum import auto, Enum -from typing import Any, Callable, final, Optional, Sequence, Tuple +from typing import Any, Callable, Iterable, Optional, Sequence, Tuple, Union +from typing_extensions import final + +from libcst._parser.parso.pgen2.generator import ReservedString +from libcst._parser.parso.python.token import PythonTokenTypes, TokenType +from libcst._parser.types.token import Token from libcst._tabs import expand_tabs - +_EOF_STR: str = "end of file (EOF)" +_INDENT_STR: str = "an indent" +_DEDENT_STR: str = "a dedent" _NEWLINE_CHARS: str = "\r\n" @@ -16,10 +23,42 @@ class EOFSentinel(Enum): EOF = auto() -class CSTLogicError(Exception): - """General purpose internal error within LibCST itself.""" +def get_expected_str( + encountered: Union[Token, EOFSentinel], + expected: Union[Iterable[Union[TokenType, ReservedString]], EOFSentinel], +) -> str: + if ( + isinstance(encountered, EOFSentinel) + or encountered.type is PythonTokenTypes.ENDMARKER + ): + encountered_str = _EOF_STR + elif encountered.type is PythonTokenTypes.INDENT: + encountered_str = _INDENT_STR + elif encountered.type is PythonTokenTypes.DEDENT: + encountered_str = _DEDENT_STR + else: + encountered_str = repr(encountered.string) - pass + if isinstance(expected, EOFSentinel): + expected_names = [_EOF_STR] + else: + expected_names = sorted( + [ + repr(el.name) if isinstance(el, TokenType) else repr(el.value) + for el in expected + ] + ) + + if len(expected_names) > 10: + # There's too many possibilities, so it's probably not useful to list them. + # Instead, let's just abbreviate the message. + return f"Unexpectedly encountered {encountered_str}." + else: + if len(expected_names) == 1: + expected_str = expected_names[0] + else: + expected_str = f"{', '.join(expected_names[:-1])}, or {expected_names[-1]}" + return f"Encountered {encountered_str}, but expected {expected_str}." # pyre-fixme[2]: 'Any' type isn't pyre-strict. 
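For readers following the parser-error changes above, here is a small usage sketch that is not part of this patch: it only shows how a malformed module surfaces as a ``ParserSyntaxError`` whose message is assembled by error-formatting helpers such as ``get_expected_str`` in the hunk above (the exact wording depends on the parser backend in use)::

    import libcst as cst

    try:
        # An unfinished binary operation is an easy way to hit the parser's error path.
        cst.parse_module("x = 1 +\n")
    except cst.ParserSyntaxError as exc:
        # str() on the exception includes the formatted context, e.g. an
        # "Encountered ..., but expected ..." style message plus position info.
        print(str(exc))
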
diff --git a/libcst/_metadata_dependent.py b/libcst/_metadata_dependent.py index 4faf7472..6a768270 100644 --- a/libcst/_metadata_dependent.py +++ b/libcst/_metadata_dependent.py @@ -7,17 +7,14 @@ import inspect from abc import ABC from contextlib import contextmanager from typing import ( - Callable, cast, ClassVar, Collection, - Generic, Iterator, Mapping, Type, TYPE_CHECKING, TypeVar, - Union, ) if TYPE_CHECKING: @@ -32,28 +29,7 @@ if TYPE_CHECKING: _T = TypeVar("_T") - -class _UNDEFINED_DEFAULT: - pass - - -class LazyValue(Generic[_T]): - """ - The class for implementing a lazy metadata loading mechanism that improves the - performance when retriving expensive metadata (e.g., qualified names). Providers - including :class:`~libcst.metadata.QualifiedNameProvider` use this class to load - the metadata of a certain node lazily when calling - :func:`~libcst.MetadataDependent.get_metadata`. - """ - - def __init__(self, callable: Callable[[], _T]) -> None: - self.callable = callable - self.return_value: Union[_T, Type[_UNDEFINED_DEFAULT]] = _UNDEFINED_DEFAULT - - def __call__(self) -> _T: - if self.return_value is _UNDEFINED_DEFAULT: - self.return_value = self.callable() - return cast(_T, self.return_value) +_UNDEFINED_DEFAULT = object() class MetadataDependent(ABC): @@ -131,9 +107,6 @@ class MetadataDependent(ABC): ) if default is not _UNDEFINED_DEFAULT: - value = self.metadata[key].get(node, default) + return cast(_T, self.metadata[key].get(node, default)) else: - value = self.metadata[key][node] - if isinstance(value, LazyValue): - value = value() - return cast(_T, value) + return cast(_T, self.metadata[key][node]) diff --git a/libcst/_nodes/base.py b/libcst/_nodes/base.py index 666fe311..9173414b 100644 --- a/libcst/_nodes/base.py +++ b/libcst/_nodes/base.py @@ -6,9 +6,8 @@ from abc import ABC, abstractmethod from copy import deepcopy from dataclasses import dataclass, field, fields, replace -from typing import Any, cast, ClassVar, Dict, List, Mapping, Sequence, TypeVar, Union +from typing import Any, cast, Dict, List, Mapping, Sequence, TypeVar, Union -from libcst import CSTLogicError from libcst._flatten_sentinel import FlattenSentinel from libcst._nodes.internal import CodegenState from libcst._removal_sentinel import RemovalSentinel @@ -110,8 +109,6 @@ def _clone(val: object) -> object: @dataclass(frozen=True) class CSTNode(ABC): - __slots__: ClassVar[Sequence[str]] = () - def __post_init__(self) -> None: # PERF: It might make more sense to move validation work into the visitor, which # would allow us to avoid validating the tree when parsing a file. @@ -238,7 +235,7 @@ class CSTNode(ABC): # validate return type of the user-defined `visitor.on_leave` method if not isinstance(leave_result, (CSTNode, RemovalSentinel, FlattenSentinel)): - raise CSTValidationError( + raise Exception( "Expected a node of type CSTNode or a RemovalSentinel, " + f"but got a return value of {type(leave_result).__name__}" ) @@ -293,7 +290,8 @@ class CSTNode(ABC): return False @abstractmethod - def _codegen_impl(self, state: CodegenState) -> None: ... + def _codegen_impl(self, state: CodegenState) -> None: + ... 
def _codegen(self, state: CodegenState, **kwargs: Any) -> None: state.before_codegen(self) @@ -383,7 +381,7 @@ class CSTNode(ABC): new_tree = self.visit(_ChildReplacementTransformer(old_node, new_node)) if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # The above transform never returns *Sentinel, so this isn't possible - raise CSTLogicError("Logic error, cannot get a *Sentinel here!") + raise Exception("Logic error, cannot get a *Sentinal here!") return new_tree def deep_remove( @@ -400,7 +398,7 @@ class CSTNode(ABC): if isinstance(new_tree, FlattenSentinel): # The above transform never returns FlattenSentinel, so this isn't possible - raise CSTLogicError("Logic error, cannot get a FlattenSentinel here!") + raise Exception("Logic error, cannot get a FlattenSentinel here!") return new_tree @@ -422,7 +420,7 @@ class CSTNode(ABC): new_tree = self.visit(_ChildWithChangesTransformer(old_node, changes)) if isinstance(new_tree, (FlattenSentinel, RemovalSentinel)): # This is impossible with the above transform. - raise CSTLogicError("Logic error, cannot get a *Sentinel here!") + raise Exception("Logic error, cannot get a *Sentinel here!") return new_tree def __eq__(self: _CSTNodeSelfT, other: object) -> bool: @@ -470,8 +468,6 @@ class CSTNode(ABC): class BaseLeaf(CSTNode, ABC): - __slots__ = () - @property def children(self) -> Sequence[CSTNode]: # override this with an optimized implementation @@ -491,8 +487,6 @@ class BaseValueToken(BaseLeaf, ABC): into the parent CSTNode, and hard-coded into the implementation of _codegen. """ - __slots__ = () - value: str def _codegen_impl(self, state: CodegenState) -> None: diff --git a/libcst/_nodes/expression.py b/libcst/_nodes/expression.py index eb95d9b3..dba5faf3 100644 --- a/libcst/_nodes/expression.py +++ b/libcst/_nodes/expression.py @@ -15,9 +15,9 @@ from tokenize import ( Imagnumber as IMAGNUMBER_RE, Intnumber as INTNUMBER_RE, ) -from typing import Callable, Generator, Literal, Optional, Sequence, Union +from typing import Callable, Generator, Optional, Sequence, Union -from libcst import CSTLogicError +from typing_extensions import Literal from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel @@ -222,8 +222,6 @@ class _BaseParenthesizedNode(CSTNode, ABC): this to get that functionality. """ - __slots__ = () - lpar: Sequence[LeftParen] = () # Sequence of parenthesis for precedence dictation. rpar: Sequence[RightParen] = () @@ -256,8 +254,6 @@ class BaseExpression(_BaseParenthesizedNode, ABC): An base class for all expressions. :class:`BaseExpression` contains no fields. """ - __slots__ = () - def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: """ Returns true if this expression is safe to be use with a word operator @@ -300,7 +296,7 @@ class BaseAssignTargetExpression(BaseExpression, ABC): `_. """ - __slots__ = () + pass class BaseDelTargetExpression(BaseExpression, ABC): @@ -320,7 +316,7 @@ class BaseDelTargetExpression(BaseExpression, ABC): `_. 
""" - __slots__ = () + pass @add_slots @@ -354,7 +350,7 @@ class Name(BaseAssignTargetExpression, BaseDelTargetExpression): if len(self.value) == 0: raise CSTValidationError("Cannot have empty name identifier.") if not self.value.isidentifier(): - raise CSTValidationError(f"Name {self.value!r} is not a valid identifier.") + raise CSTValidationError("Name is not a valid identifier.") def _codegen_impl(self, state: CodegenState) -> None: with self._parenthesize(state): @@ -397,8 +393,6 @@ class BaseNumber(BaseExpression, ABC): used anywhere that you need to explicitly take any number type. """ - __slots__ = () - def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: """ Numbers are funny. The expression "5in [1,2,3,4,5]" is a valid expression @@ -528,15 +522,13 @@ class BaseString(BaseExpression, ABC): :class:`SimpleString`, :class:`ConcatenatedString`, and :class:`FormattedString`. """ - __slots__ = () + pass StringQuoteLiteral = Literal['"', "'", '"""', "'''"] class _BasePrefixedString(BaseString, ABC): - __slots__ = () - @property def prefix(self) -> str: """ @@ -655,20 +647,14 @@ class SimpleString(_BasePrefixedString): if len(quote) == 2: # Let's assume this is an empty string. quote = quote[:1] - elif 3 < len(quote) <= 6: - # Let's assume this can be one of the following: - # >>> """"foo""" - # '"foo' - # >>> """""bar""" - # '""bar' - # >>> """""" - # '' + elif len(quote) == 6: + # Let's assume this is an empty triple-quoted string. quote = quote[:3] if len(quote) not in {1, 3}: # We shouldn't get here due to construction validation logic, # but handle the case anyway. - raise CSTLogicError(f"Invalid string {self.value}") + raise Exception("Invalid string {self.value}") # pyre-ignore We know via the above validation that we will only # ever return one of the four string literals. @@ -699,7 +685,7 @@ class SimpleString(_BasePrefixedString): state.add_token(self.value) @property - def evaluated_value(self) -> Union[str, bytes]: + def evaluated_value(self) -> str: """ Return an :func:`ast.literal_eval` evaluated str of :py:attr:`value`. """ @@ -713,7 +699,7 @@ class BaseFormattedStringContent(CSTNode, ABC): sequence of :class:`BaseFormattedStringContent` parts. """ - __slots__ = () + pass @add_slots @@ -958,253 +944,6 @@ class FormattedString(_BasePrefixedString): state.add_token(self.end) -class BaseTemplatedStringContent(CSTNode, ABC): - """ - The base type for :class:`TemplatedStringText` and - :class:`TemplatedStringExpression`. A :class:`TemplatedString` is composed of a - sequence of :class:`BaseTemplatedStringContent` parts. - """ - - __slots__ = () - - -@add_slots -@dataclass(frozen=True) -class TemplatedStringText(BaseTemplatedStringContent): - """ - Part of a :class:`TemplatedString` that is not inside curly braces (``{`` or ``}``). - For example, in:: - - f"ab{cd}ef" - - ``ab`` and ``ef`` are :class:`TemplatedStringText` nodes, but ``{cd}`` is a - :class:`TemplatedStringExpression`. - """ - - #: The raw string value, including any escape characters present in the source - #: code, not including any enclosing quotes. 
- value: str - - def _visit_and_replace_children( - self, visitor: CSTVisitorT - ) -> "TemplatedStringText": - return TemplatedStringText(value=self.value) - - def _codegen_impl(self, state: CodegenState) -> None: - state.add_token(self.value) - - -@add_slots -@dataclass(frozen=True) -class TemplatedStringExpression(BaseTemplatedStringContent): - """ - Part of a :class:`TemplatedString` that is inside curly braces (``{`` or ``}``), - including the surrounding curly braces. For example, in:: - - f"ab{cd}ef" - - ``{cd}`` is a :class:`TemplatedStringExpression`, but ``ab`` and ``ef`` are - :class:`TemplatedStringText` nodes. - - An t-string expression may contain ``conversion`` and ``format_spec`` suffixes that - control how the expression is converted to a string. - """ - - #: The expression we will evaluate and render when generating the string. - expression: BaseExpression - - #: An optional conversion specifier, such as ``!s``, ``!r`` or ``!a``. - conversion: Optional[str] = None - - #: An optional format specifier following the `format specification mini-language - #: `_. - format_spec: Optional[Sequence[BaseTemplatedStringContent]] = None - - #: Whitespace after the opening curly brace (``{``), but before the ``expression``. - whitespace_before_expression: BaseParenthesizableWhitespace = ( - SimpleWhitespace.field("") - ) - - #: Whitespace after the ``expression``, but before the ``conversion``, - #: ``format_spec`` and the closing curly brace (``}``). Python does not - #: allow whitespace inside or after a ``conversion`` or ``format_spec``. - whitespace_after_expression: BaseParenthesizableWhitespace = SimpleWhitespace.field( - "" - ) - - #: Equal sign for Templated string expression uses self-documenting expressions, - #: such as ``f"{x=}"``. See the `Python 3.8 release notes - #: `_. - equal: Optional[AssignEqual] = None - - def _validate(self) -> None: - if self.conversion is not None and self.conversion not in ("s", "r", "a"): - raise CSTValidationError("Invalid t-string conversion.") - - def _visit_and_replace_children( - self, visitor: CSTVisitorT - ) -> "TemplatedStringExpression": - format_spec = self.format_spec - return TemplatedStringExpression( - whitespace_before_expression=visit_required( - self, - "whitespace_before_expression", - self.whitespace_before_expression, - visitor, - ), - expression=visit_required(self, "expression", self.expression, visitor), - equal=visit_optional(self, "equal", self.equal, visitor), - whitespace_after_expression=visit_required( - self, - "whitespace_after_expression", - self.whitespace_after_expression, - visitor, - ), - conversion=self.conversion, - format_spec=( - visit_sequence(self, "format_spec", format_spec, visitor) - if format_spec is not None - else None - ), - ) - - def _codegen_impl(self, state: CodegenState) -> None: - state.add_token("{") - self.whitespace_before_expression._codegen(state) - self.expression._codegen(state) - equal = self.equal - if equal is not None: - equal._codegen(state) - self.whitespace_after_expression._codegen(state) - conversion = self.conversion - if conversion is not None: - state.add_token("!") - state.add_token(conversion) - format_spec = self.format_spec - if format_spec is not None: - state.add_token(":") - for spec in format_spec: - spec._codegen(state) - state.add_token("}") - - -@add_slots -@dataclass(frozen=True) -class TemplatedString(_BasePrefixedString): - """ - An "t-string". Template strings are a generalization of f-strings, - using a t in place of the f prefix. 
Instead of evaluating to str, - t-strings evaluate to a new type: Template - - T-Strings are defined in 'PEP 750' - - >>> import libcst as cst - >>> cst.parse_expression('t"ab{cd}ef"') - TemplatedString( - parts=[ - TemplatedStringText( - value='ab', - ), - TemplatedStringExpression( - expression=Name( - value='cd', - lpar=[], - rpar=[], - ), - conversion=None, - format_spec=None, - whitespace_before_expression=SimpleWhitespace( - value='', - ), - whitespace_after_expression=SimpleWhitespace( - value='', - ), - equal=None, - ), - TemplatedStringText( - value='ef', - ), - ], - start='t"', - end='"', - lpar=[], - rpar=[], - ) - >>> - """ - - #: A templated string is composed as a series of :class:`TemplatedStringText` and - #: :class:`TemplatedStringExpression` parts. - parts: Sequence[BaseTemplatedStringContent] - - #: The string prefix and the leading quote, such as ``t"``, ``T'``, ``tr"``, or - #: ``t"""``. - start: str = 't"' - - #: The trailing quote. This must match the type of quote used in ``start``. - end: Literal['"', "'", '"""', "'''"] = '"' - - lpar: Sequence[LeftParen] = () - #: Sequence of parenthesis for precidence dictation. - rpar: Sequence[RightParen] = () - - def _validate(self) -> None: - super(_BasePrefixedString, self)._validate() - - # Validate any prefix - prefix = self.prefix - if prefix not in ("t", "tr", "rt"): - raise CSTValidationError("Invalid t-string prefix.") - - # Validate wrapping quotes - starttoken = self.start[len(prefix) :] - if starttoken != self.end: - raise CSTValidationError("t-string must have matching enclosing quotes.") - - # Validate valid wrapping quote usage - if starttoken not in ('"', "'", '"""', "'''"): - raise CSTValidationError("Invalid t-string enclosing quotes.") - - @property - def prefix(self) -> str: - """ - Returns the string's prefix, if any exists. The prefix can be ``t``, - ``tr``, or ``rt``. - """ - - prefix = "" - for c in self.start: - if c in ['"', "'"]: - break - prefix += c - return prefix.lower() - - @property - def quote(self) -> StringQuoteLiteral: - """ - Returns the quotation used to denote the string. Can be either ``'``, - ``"``, ``'''`` or ``\"\"\"``. 
- """ - - return self.end - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TemplatedString": - return TemplatedString( - lpar=visit_sequence(self, "lpar", self.lpar, visitor), - start=self.start, - parts=visit_sequence(self, "parts", self.parts, visitor), - end=self.end, - rpar=visit_sequence(self, "rpar", self.rpar, visitor), - ) - - def _codegen_impl(self, state: CodegenState) -> None: - with self._parenthesize(state): - state.add_token(self.start) - for part in self.parts: - part._codegen(state) - state.add_token(self.end) - - @add_slots @dataclass(frozen=True) class ConcatenatedString(BaseString): @@ -1259,7 +998,7 @@ class ConcatenatedString(BaseString): elif isinstance(right, FormattedString): rightbytes = "b" in right.prefix else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") if leftbytes != rightbytes: raise CSTValidationError("Cannot concatenate string and bytes.") @@ -1281,7 +1020,7 @@ class ConcatenatedString(BaseString): self.right._codegen(state) @property - def evaluated_value(self) -> Union[str, bytes, None]: + def evaluated_value(self) -> Optional[str]: """ Return an :func:`ast.literal_eval` evaluated str of recursively concatenated :py:attr:`left` and :py:attr:`right` if and only if both :py:attr:`left` and :py:attr:`right` are composed by :class:`SimpleString` or :class:`ConcatenatedString` @@ -1295,11 +1034,7 @@ class ConcatenatedString(BaseString): right_val = right.evaluated_value if right_val is None: return None - if isinstance(left_val, bytes) and isinstance(right_val, bytes): - return left_val + right_val - if isinstance(left_val, str) and isinstance(right_val, str): - return left_val + right_val - return None + return left_val + right_val @add_slots @@ -1680,8 +1415,6 @@ class BaseSlice(CSTNode, ABC): This node is purely for typing. """ - __slots__ = () - @add_slots @dataclass(frozen=True) @@ -1694,29 +1427,10 @@ class Index(BaseSlice): #: The index value itself. value: BaseExpression - #: An optional string with an asterisk appearing before the name. This is - #: expanded into variable number of positional arguments. See PEP-646 - star: Optional[Literal["*"]] = None - - #: Whitespace after the ``star`` (if it exists), but before the ``value``. - whitespace_after_star: Optional[BaseParenthesizableWhitespace] = None - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Index": - return Index( - star=self.star, - whitespace_after_star=visit_optional( - self, "whitespace_after_star", self.whitespace_after_star, visitor - ), - value=visit_required(self, "value", self.value, visitor), - ) + return Index(value=visit_required(self, "value", self.value, visitor)) def _codegen_impl(self, state: CodegenState) -> None: - star = self.star - if star is not None: - state.add_token(star) - ws = self.whitespace_after_star - if ws is not None: - ws._codegen(state) self.value._codegen(state) @@ -1896,9 +1610,9 @@ class Annotation(CSTNode): #: colon or arrow. 
annotation: BaseExpression - whitespace_before_indicator: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_before_indicator: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT whitespace_after_indicator: BaseParenthesizableWhitespace = SimpleWhitespace.field( " " ) @@ -1937,7 +1651,7 @@ class Annotation(CSTNode): if default_indicator == "->": state.add_token(" ") else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") # Now, output the indicator and the rest of the annotation state.add_token(default_indicator) @@ -1982,26 +1696,15 @@ class ParamSlash(CSTNode): .. _PEP 570: https://www.python.org/dev/peps/pep-0570/#specification """ - #: Optional comma that comes after the slash. This comma doesn't own the whitespace - #: between ``/`` and ``,``. + # Optional comma that comes after the slash. comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT - #: Whitespace after the ``/`` character. This is captured here in case there is a - #: comma. - whitespace_after: BaseParenthesizableWhitespace = SimpleWhitespace.field("") - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ParamSlash": - return ParamSlash( - comma=visit_sentinel(self, "comma", self.comma, visitor), - whitespace_after=visit_required( - self, "whitespace_after", self.whitespace_after, visitor - ), - ) + return ParamSlash(comma=visit_sentinel(self, "comma", self.comma, visitor)) def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: state.add_token("/") - self.whitespace_after._codegen(state) comma = self.comma if comma is MaybeSentinel.DEFAULT and default_comma: state.add_token(", ") @@ -2230,25 +1933,6 @@ class Parameters(CSTNode): star_kwarg=visit_optional(self, "star_kwarg", self.star_kwarg, visitor), ) - def _safe_to_join_with_lambda(self) -> bool: - """ - Determine if Parameters need a space after the `lambda` keyword. Returns True - iff it's safe to omit the space between `lambda` and these Parameters. - - See also `BaseExpression._safe_to_use_with_word_operator`. - - For example: `lambda*_: pass` - """ - if len(self.posonly_params) != 0: - return False - - # posonly_ind can't appear if above condition is false - - if len(self.params) > 0 and self.params[0].star not in {"*", "**"}: - return False - - return True - def _codegen_impl(self, state: CodegenState) -> None: # noqa: C901 # Compute the star existence first so we can ask about whether # each element is the last in the list or not. @@ -2350,16 +2034,9 @@ class Lambda(BaseExpression): rpar: Sequence[RightParen] = () #: Whitespace after the lambda keyword, but before any argument or the colon. 
- whitespace_after_lambda: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) - - def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: - if position == ExpressionPosition.LEFT: - return len(self.rpar) > 0 or self.body._safe_to_use_with_word_operator( - position - ) - return super()._safe_to_use_with_word_operator(position) + whitespace_after_lambda: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT def _validate(self) -> None: # Validate parents @@ -2388,7 +2065,6 @@ class Lambda(BaseExpression): if ( isinstance(whitespace_after_lambda, BaseParenthesizableWhitespace) and whitespace_after_lambda.empty - and not self.params._safe_to_join_with_lambda() ): raise CSTValidationError( "Must have at least one space after lambda when specifying params" @@ -2514,8 +2190,6 @@ class _BaseExpressionWithArgs(BaseExpression, ABC): in typing. So, we have common validation functions here. """ - __slots__ = () - #: Sequence of arguments that will be passed to the function call. args: Sequence[Arg] = () @@ -2707,12 +2381,7 @@ class Await(BaseExpression): # Validate any super-class stuff, whatever it may be. super(Await, self)._validate() # Make sure we don't run identifiers together. - if ( - self.whitespace_after_await.empty - and not self.expression._safe_to_use_with_word_operator( - ExpressionPosition.RIGHT - ) - ): + if self.whitespace_after_await.empty: raise CSTValidationError("Must have at least one space after await") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Await": @@ -2766,12 +2435,6 @@ class IfExp(BaseExpression): #: Whitespace after the ``else`` keyword, but before the ``orelse`` expression. whitespace_after_else: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") - def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: - if position == ExpressionPosition.RIGHT: - return self.body._safe_to_use_with_word_operator(position) - else: - return self.orelse._safe_to_use_with_word_operator(position) - def _validate(self) -> None: # Paren validation and such super(IfExp, self)._validate() @@ -2850,9 +2513,9 @@ class From(CSTNode): item: BaseExpression #: The whitespace at the very start of this node. - whitespace_before_from: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_before_from: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT #: The whitespace after the ``from`` keyword, but before the ``item``. whitespace_after_from: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") @@ -2911,9 +2574,9 @@ class Yield(BaseExpression): rpar: Sequence[RightParen] = () #: Whitespace after the ``yield`` keyword, but before the ``value``. - whitespace_after_yield: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_after_yield: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT def _validate(self) -> None: # Paren rules and such @@ -2968,8 +2631,6 @@ class _BaseElementImpl(CSTNode, ABC): An internal base class for :class:`Element` and :class:`DictElement`. """ - __slots__ = () - value: BaseExpression comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT @@ -2997,7 +2658,8 @@ class _BaseElementImpl(CSTNode, ABC): state: CodegenState, default_comma: bool = False, default_comma_whitespace: bool = False, # False for a single-item collection - ) -> None: ... + ) -> None: + ... 
class BaseElement(_BaseElementImpl, ABC): @@ -3006,8 +2668,6 @@ class BaseElement(_BaseElementImpl, ABC): BaseDictElement. """ - __slots__ = () - class BaseDictElement(_BaseElementImpl, ABC): """ @@ -3015,8 +2675,6 @@ class BaseDictElement(_BaseElementImpl, ABC): BaseElement. """ - __slots__ = () - @add_slots @dataclass(frozen=True) @@ -3103,7 +2761,7 @@ class DictElement(BaseDictElement): @add_slots @dataclass(frozen=True) -class StarredElement(BaseElement, BaseExpression, _BaseParenthesizedNode): +class StarredElement(BaseElement, _BaseParenthesizedNode): """ A starred ``*value`` element that expands to represent multiple values in a literal :class:`List`, :class:`Tuple`, or :class:`Set`. @@ -3299,8 +2957,6 @@ class BaseList(BaseExpression, ABC): object when evaluated. """ - __slots__ = () - lbracket: LeftSquareBracket = LeftSquareBracket.field() #: Brackets surrounding the list. rbracket: RightSquareBracket = RightSquareBracket.field() @@ -3381,8 +3037,6 @@ class _BaseSetOrDict(BaseExpression, ABC): shouldn't be exported. """ - __slots__ = () - lbrace: LeftCurlyBrace = LeftCurlyBrace.field() #: Braces surrounding the set or dict. rbrace: RightCurlyBrace = RightCurlyBrace.field() @@ -3408,8 +3062,6 @@ class BaseSet(_BaseSetOrDict, ABC): a set object when evaluated. """ - __slots__ = () - @add_slots @dataclass(frozen=True) @@ -3479,8 +3131,6 @@ class BaseDict(_BaseSetOrDict, ABC): a dict object when evaluated. """ - __slots__ = () - @add_slots @dataclass(frozen=True) @@ -3757,8 +3407,6 @@ class BaseComp(BaseExpression, ABC): :class:`GeneratorExp`, :class:`ListComp`, :class:`SetComp`, and :class:`DictComp`. """ - __slots__ = () - for_in: CompFor @@ -3769,12 +3417,10 @@ class BaseSimpleComp(BaseComp, ABC): ``value``. """ - __slots__ = () - #: The expression evaluated during each iteration of the comprehension. This #: lexically comes before the ``for_in`` clause, but it is semantically the #: inner-most element, evaluated inside the ``for_in`` clause. - elt: BaseExpression + elt: BaseAssignTargetExpression #: The ``for ... in ... if ...`` clause that lexically comes after ``elt``. This may #: be a nested structure for nested comprehensions. See :class:`CompFor` for @@ -3807,7 +3453,7 @@ class GeneratorExp(BaseSimpleComp): """ #: The expression evaluated and yielded during each iteration of the generator. - elt: BaseExpression + elt: BaseAssignTargetExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3858,7 +3504,7 @@ class ListComp(BaseList, BaseSimpleComp): """ #: The expression evaluated and stored during each iteration of the comprehension. - elt: BaseExpression + elt: BaseAssignTargetExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3900,7 +3546,7 @@ class SetComp(BaseSet, BaseSimpleComp): """ #: The expression evaluated and stored during each iteration of the comprehension. - elt: BaseExpression + elt: BaseAssignTargetExpression #: The ``for ... in ... if ...`` clause that comes after ``elt``. This may be a #: nested structure for nested comprehensions. See :class:`CompFor` for details. @@ -3942,10 +3588,10 @@ class DictComp(BaseDict, BaseComp): """ #: The key inserted into the dictionary during each iteration of the comprehension. 
- key: BaseExpression + key: BaseAssignTargetExpression #: The value associated with the ``key`` inserted into the dictionary during each #: iteration of the comprehension. - value: BaseExpression + value: BaseAssignTargetExpression #: The ``for ... in ... if ...`` clause that lexically comes after ``key`` and #: ``value``. This may be a nested structure for nested comprehensions. See @@ -4049,15 +3695,6 @@ class NamedExpr(BaseExpression): rpar=visit_sequence(self, "rpar", self.rpar, visitor), ) - def _safe_to_use_with_word_operator(self, position: ExpressionPosition) -> bool: - if position == ExpressionPosition.LEFT: - return len(self.rpar) > 0 or self.value._safe_to_use_with_word_operator( - position - ) - return len(self.lpar) > 0 or self.target._safe_to_use_with_word_operator( - position - ) - def _codegen_impl(self, state: CodegenState) -> None: with self._parenthesize(state): self.target._codegen(state) diff --git a/libcst/_nodes/module.py b/libcst/_nodes/module.py index 9ed45716..149a4375 100644 --- a/libcst/_nodes/module.py +++ b/libcst/_nodes/module.py @@ -79,6 +79,7 @@ class Module(CSTNode): has_trailing_newline=self.has_trailing_newline, ) + # pyre-fixme[14]: `visit` overrides method defined in `CSTNode` inconsistently. def visit(self: _ModuleSelfT, visitor: CSTVisitorT) -> _ModuleSelfT: """ Returns the result of running a visitor over this module. diff --git a/libcst/_nodes/op.py b/libcst/_nodes/op.py index 1765f536..ea02835a 100644 --- a/libcst/_nodes/op.py +++ b/libcst/_nodes/op.py @@ -19,8 +19,6 @@ class _BaseOneTokenOp(CSTNode, ABC): Any node that has a static value and needs to own whitespace on both sides. """ - __slots__ = () - whitespace_before: BaseParenthesizableWhitespace whitespace_after: BaseParenthesizableWhitespace @@ -43,7 +41,8 @@ class _BaseOneTokenOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_token(self) -> str: ... + def _get_token(self) -> str: + ... class _BaseTwoTokenOp(CSTNode, ABC): @@ -52,8 +51,6 @@ class _BaseTwoTokenOp(CSTNode, ABC): in beteween them. """ - __slots__ = () - whitespace_before: BaseParenthesizableWhitespace whitespace_between: BaseParenthesizableWhitespace @@ -87,7 +84,8 @@ class _BaseTwoTokenOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_tokens(self) -> Tuple[str, str]: ... + def _get_tokens(self) -> Tuple[str, str]: + ... class BaseUnaryOp(CSTNode, ABC): @@ -95,8 +93,6 @@ class BaseUnaryOp(CSTNode, ABC): Any node that has a static value used in a :class:`UnaryOperation` expression. """ - __slots__ = () - #: Any space that appears directly after this operator. whitespace_after: BaseParenthesizableWhitespace @@ -113,7 +109,8 @@ class BaseUnaryOp(CSTNode, ABC): self.whitespace_after._codegen(state) @abstractmethod - def _get_token(self) -> str: ... + def _get_token(self) -> str: + ... class BaseBooleanOp(_BaseOneTokenOp, ABC): @@ -122,8 +119,6 @@ class BaseBooleanOp(_BaseOneTokenOp, ABC): This node is purely for typing. """ - __slots__ = () - class BaseBinaryOp(CSTNode, ABC): """ @@ -131,8 +126,6 @@ class BaseBinaryOp(CSTNode, ABC): This node is purely for typing. """ - __slots__ = () - class BaseCompOp(CSTNode, ABC): """ @@ -140,8 +133,6 @@ class BaseCompOp(CSTNode, ABC): This node is purely for typing. """ - __slots__ = () - class BaseAugOp(CSTNode, ABC): """ @@ -149,8 +140,6 @@ class BaseAugOp(CSTNode, ABC): This node is purely for typing. 
""" - __slots__ = () - @add_slots @dataclass(frozen=True) diff --git a/libcst/_nodes/statement.py b/libcst/_nodes/statement.py index cdc49edc..9493f57c 100644 --- a/libcst/_nodes/statement.py +++ b/libcst/_nodes/statement.py @@ -7,9 +7,7 @@ import inspect import re from abc import ABC, abstractmethod from dataclasses import dataclass, field -from typing import Literal, Optional, Pattern, Sequence, Union - -from libcst import CSTLogicError +from typing import Optional, Pattern, Sequence, Union from libcst._add_slots import add_slots from libcst._maybe_sentinel import MaybeSentinel @@ -23,6 +21,7 @@ from libcst._nodes.expression import ( BaseAssignTargetExpression, BaseDelTargetExpression, BaseExpression, + Call, ConcatenatedString, ExpressionPosition, From, @@ -50,7 +49,6 @@ from libcst._nodes.op import ( AssignEqual, BaseAugOp, BitOr, - Colon, Comma, Dot, ImportStar, @@ -81,8 +79,6 @@ class BaseSuite(CSTNode, ABC): -- https://docs.python.org/3/reference/compound_stmts.html """ - __slots__ = () - body: Union[Sequence["BaseStatement"], Sequence["BaseSmallStatement"]] @@ -92,7 +88,7 @@ class BaseStatement(CSTNode, ABC): in a particular location. """ - __slots__ = () + pass class BaseSmallStatement(CSTNode, ABC): @@ -103,8 +99,6 @@ class BaseSmallStatement(CSTNode, ABC): simplify type definitions and isinstance checks. """ - __slots__ = () - #: An optional semicolon that appears after a small statement. This is optional #: for the last small statement in a :class:`SimpleStatementLine` or #: :class:`SimpleStatementSuite`, but all other small statements inside a simple @@ -115,7 +109,8 @@ class BaseSmallStatement(CSTNode, ABC): @abstractmethod def _codegen_impl( self, state: CodegenState, default_semicolon: bool = False - ) -> None: ... + ) -> None: + ... @add_slots @@ -274,9 +269,9 @@ class Return(BaseSmallStatement): #: Optional whitespace after the ``return`` keyword before the optional #: value expression. - whitespace_after_return: Union[SimpleWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_after_return: Union[ + SimpleWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT #: Optional semicolon when this is used in a statement line. This semicolon #: owns the whitespace on both sides of it when it is used. @@ -375,8 +370,6 @@ class _BaseSimpleStatement(CSTNode, ABC): small statement. """ - __slots__ = () - #: Sequence of small statements. All but the last statement are required to have #: a semicolon. body: Sequence[BaseSmallStatement] @@ -561,8 +554,6 @@ class BaseCompoundStatement(BaseStatement, ABC): -- https://docs.python.org/3/reference/compound_stmts.html """ - __slots__ = () - #: The body of this compound statement. body: BaseSuite @@ -600,12 +591,7 @@ class If(BaseCompoundStatement): #: The whitespace appearing after the test expression but before the colon. 
whitespace_after_test: SimpleWhitespace = SimpleWhitespace.field("") - def _validate(self) -> None: - if ( - self.whitespace_before_test.empty - and not self.test._safe_to_use_with_word_operator(ExpressionPosition.RIGHT) - ): - raise CSTValidationError("Must have at least one space after 'if' keyword.") + # TODO: _validate def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "If": return If( @@ -752,13 +738,12 @@ class AsName(CSTNode): whitespace_after_as: BaseParenthesizableWhitespace = SimpleWhitespace.field(" ") def _validate(self) -> None: - if ( - self.whitespace_after_as.empty - and not self.name._safe_to_use_with_word_operator(ExpressionPosition.RIGHT) - ): + if self.whitespace_after_as.empty: raise CSTValidationError( "There must be at least one space between 'as' and name." ) + if self.whitespace_before_as.empty: + raise CSTValidationError("There must be at least one space before 'as'.") def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "AsName": return AsName( @@ -822,16 +807,6 @@ class ExceptHandler(CSTNode): raise CSTValidationError( "Must have at least one space after except when ExceptHandler has a type." ) - name = self.name - if ( - type_ is not None - and name is not None - and name.whitespace_before_as.empty - and not type_._safe_to_use_with_word_operator(ExpressionPosition.LEFT) - ): - raise CSTValidationError( - "Must have at least one space before as keyword in an except handler." - ) def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ExceptHandler": return ExceptHandler( @@ -1156,21 +1131,18 @@ class ImportAlias(CSTNode): def _validate(self) -> None: asname = self.asname - if asname is not None: - if not isinstance(asname.name, Name): - raise CSTValidationError( - "Must use a Name node for AsName name inside ImportAlias." - ) - if asname.whitespace_before_as.empty: - raise CSTValidationError( - "Must have at least one space before as keyword in an ImportAlias." - ) + if asname is not None and not isinstance(asname.name, Name): + raise CSTValidationError( + "Must use a Name node for AsName name inside ImportAlias." + ) try: self.evaluated_name - except CSTLogicError as e: - raise CSTValidationError( - "The imported name must be a valid qualified name." - ) from e + except Exception as e: + if str(e) == "Logic error!": + raise CSTValidationError( + "The imported name must be a valid qualified name." + ) + raise e def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ImportAlias": return ImportAlias( @@ -1199,7 +1171,7 @@ class ImportAlias(CSTNode): elif isinstance(node, Attribute): return f"{self._name(node.value)}.{node.attr.value}" else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") @property def evaluated_name(self) -> str: @@ -1626,7 +1598,7 @@ class Decorator(CSTNode): #: The decorator that will return a new function wrapping the parent #: of this decorator. - decorator: BaseExpression + decorator: Union[Name, Attribute, Call] #: Line comments and empty lines before this decorator. The parent #: :class:`FunctionDef` or :class:`ClassDef` node owns leading lines before @@ -1639,6 +1611,19 @@ class Decorator(CSTNode): #: Optional trailing comment and newline following the decorator before the next line. trailing_whitespace: TrailingWhitespace = TrailingWhitespace.field() + def _validate(self) -> None: + decorator = self.decorator + if len(decorator.lpar) > 0 or len(decorator.rpar) > 0: + raise CSTValidationError( + "Cannot have parens around decorator in a Decorator." 
+ ) + if isinstance(decorator, Call) and not isinstance( + decorator.func, (Name, Attribute) + ): + raise CSTValidationError( + "Decorator call function must be Name or Attribute node." + ) + def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "Decorator": return Decorator( leading_lines=visit_sequence( @@ -1694,8 +1679,6 @@ def get_docstring_impl( evaluated_value = val.evaluated_value else: return None - if isinstance(evaluated_value, bytes): - return None if evaluated_value is not None and clean: return inspect.cleandoc(evaluated_value) @@ -1743,8 +1726,8 @@ class FunctionDef(BaseCompoundStatement): #: Whitespace after the ``def`` keyword and before the function name. whitespace_after_def: SimpleWhitespace = SimpleWhitespace.field(" ") - #: Whitespace after the function name and before the type parameters or the opening - #: parenthesis for the parameters. + #: Whitespace after the function name and before the opening parenthesis for + #: the parameters. whitespace_after_name: SimpleWhitespace = SimpleWhitespace.field("") #: Whitespace after the opening parenthesis for the parameters but before @@ -1755,13 +1738,6 @@ class FunctionDef(BaseCompoundStatement): #: the colon. whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") - #: An optional declaration of type parameters. - type_parameters: Optional["TypeParameters"] = None - - #: Whitespace between the type parameters and the opening parenthesis for the - #: (non-type) parameters. - whitespace_after_type_parameters: SimpleWhitespace = SimpleWhitespace.field("") - def _validate(self) -> None: if len(self.name.lpar) > 0 or len(self.name.rpar) > 0: raise CSTValidationError("Cannot have parens around Name in a FunctionDef.") @@ -1770,15 +1746,6 @@ class FunctionDef(BaseCompoundStatement): "There must be at least one space between 'def' and name." ) - if ( - self.type_parameters is None - and not self.whitespace_after_type_parameters.empty - ): - raise CSTValidationError( - "whitespace_after_type_parameters must be empty if there are no type " - "parameters in FunctionDef" - ) - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "FunctionDef": return FunctionDef( leading_lines=visit_sequence( @@ -1798,15 +1765,6 @@ class FunctionDef(BaseCompoundStatement): whitespace_after_name=visit_required( self, "whitespace_after_name", self.whitespace_after_name, visitor ), - type_parameters=visit_optional( - self, "type_parameters", self.type_parameters, visitor - ), - whitespace_after_type_parameters=visit_required( - self, - "whitespace_after_type_parameters", - self.whitespace_after_type_parameters, - visitor, - ), whitespace_before_params=visit_required( self, "whitespace_before_params", self.whitespace_before_params, visitor ), @@ -1835,10 +1793,6 @@ class FunctionDef(BaseCompoundStatement): self.whitespace_after_def._codegen(state) self.name._codegen(state) self.whitespace_after_name._codegen(state) - type_params = self.type_parameters - if type_params is not None: - type_params._codegen(state) - self.whitespace_after_type_parameters._codegen(state) state.add_token("(") self.whitespace_before_params._codegen(state) self.params._codegen(state) @@ -1900,34 +1854,19 @@ class ClassDef(BaseCompoundStatement): #: Whitespace after the ``class`` keyword and before the class name. whitespace_after_class: SimpleWhitespace = SimpleWhitespace.field(" ") - #: Whitespace after the class name and before the type parameters or the opening - #: parenthesis for the bases and keywords. 
+ #: Whitespace after the class name and before the opening parenthesis for + #: the bases and keywords. whitespace_after_name: SimpleWhitespace = SimpleWhitespace.field("") #: Whitespace after the closing parenthesis or class name and before #: the colon. whitespace_before_colon: SimpleWhitespace = SimpleWhitespace.field("") - #: An optional declaration of type parameters. - type_parameters: Optional["TypeParameters"] = None - - #: Whitespace between type parameters and opening parenthesis for the bases and - #: keywords. - whitespace_after_type_parameters: SimpleWhitespace = SimpleWhitespace.field("") - def _validate_whitespace(self) -> None: if self.whitespace_after_class.empty: raise CSTValidationError( "There must be at least one space between 'class' and name." ) - if ( - self.type_parameters is None - and not self.whitespace_after_type_parameters.empty - ): - raise CSTValidationError( - "whitespace_after_type_parameters must be empty if there are no type" - "parameters in a ClassDef" - ) def _validate_parens(self) -> None: if len(self.name.lpar) > 0 or len(self.name.rpar) > 0: @@ -1970,15 +1909,6 @@ class ClassDef(BaseCompoundStatement): whitespace_after_name=visit_required( self, "whitespace_after_name", self.whitespace_after_name, visitor ), - type_parameters=visit_optional( - self, "type_parameters", self.type_parameters, visitor - ), - whitespace_after_type_parameters=visit_required( - self, - "whitespace_after_type_parameters", - self.whitespace_after_type_parameters, - visitor, - ), lpar=visit_sentinel(self, "lpar", self.lpar, visitor), bases=visit_sequence(self, "bases", self.bases, visitor), keywords=visit_sequence(self, "keywords", self.keywords, visitor), @@ -2003,10 +1933,6 @@ class ClassDef(BaseCompoundStatement): self.whitespace_after_class._codegen(state) self.name._codegen(state) self.whitespace_after_name._codegen(state) - type_params = self.type_parameters - if type_params is not None: - type_params._codegen(state) - self.whitespace_after_type_parameters._codegen(state) lpar = self.lpar if isinstance(lpar, MaybeSentinel): if self.bases or self.keywords: @@ -2052,15 +1978,6 @@ class WithItem(CSTNode): #: other items inside a with block must contain a comma to separate them. comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT - def _validate(self) -> None: - asname = self.asname - if ( - asname is not None - and asname.whitespace_before_as.empty - and not self.item._safe_to_use_with_word_operator(ExpressionPosition.LEFT) - ): - raise CSTValidationError("Must have at least one space before as keyword.") - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "WithItem": return WithItem( item=visit_required(self, "item", self.item, visitor), @@ -2402,9 +2319,9 @@ class Raise(BaseSmallStatement): cause: Optional[From] = None #: Any whitespace appearing between the ``raise`` keyword and the exception. - whitespace_after_raise: Union[SimpleWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_after_raise: Union[ + SimpleWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT #: Optional semicolon when this is used in a statement line. This semicolon #: owns the whitespace on both sides of it when it is used. @@ -2716,8 +2633,6 @@ class MatchPattern(_BaseParenthesizedNode, ABC): statement. 
""" - __slots__ = () - @add_slots @dataclass(frozen=True) @@ -2762,6 +2677,11 @@ class Match(BaseCompoundStatement): if len(self.cases) == 0: raise CSTValidationError("A match statement must have at least one case.") + if self.whitespace_after_match.empty: + raise CSTValidationError( + "Must have at least one space after a 'match' keyword" + ) + indent = self.indent if indent is not None: if len(indent) == 0: @@ -2858,16 +2778,17 @@ class MatchCase(CSTNode): self, "whitespace_after_case", self.whitespace_after_case, visitor ), pattern=visit_required(self, "pattern", self.pattern, visitor), - whitespace_before_if=visit_required( + # pyre-fixme[6]: Expected `SimpleWhitespace` for 4th param but got + # `Optional[SimpleWhitespace]`. + whitespace_before_if=visit_optional( self, "whitespace_before_if", self.whitespace_before_if, visitor ), - whitespace_after_if=visit_required( + # pyre-fixme[6]: Expected `SimpleWhitespace` for 5th param but got + # `Optional[SimpleWhitespace]`. + whitespace_after_if=visit_optional( self, "whitespace_after_if", self.whitespace_after_if, visitor ), guard=visit_optional(self, "guard", self.guard, visitor), - whitespace_before_colon=visit_required( - self, "whitespace_before_colon", self.whitespace_before_colon, visitor - ), body=visit_required(self, "body", self.body, visitor), ) @@ -2886,9 +2807,6 @@ class MatchCase(CSTNode): state.add_token("if") self.whitespace_after_if._codegen(state) guard._codegen(state) - else: - self.whitespace_before_if._codegen(state) - self.whitespace_after_if._codegen(state) self.whitespace_before_colon._codegen(state) state.add_token(":") @@ -2920,14 +2838,6 @@ class MatchValue(MatchPattern): def lpar(self, value: Sequence[LeftParen]) -> None: self.value.lpar = value - @property - def rpar(self) -> Sequence[RightParen]: - return self.value.rpar - - @rpar.setter - def rpar(self, value: Sequence[RightParen]) -> None: - self.value.rpar = value - @add_slots @dataclass(frozen=True) @@ -2961,15 +2871,6 @@ class MatchSingleton(MatchPattern): # pyre-fixme[41]: Cannot reassign final attribute `lpar`. self.value.lpar = value - @property - def rpar(self) -> Sequence[RightParen]: - return self.value.rpar - - @rpar.setter - def rpar(self, value: Sequence[RightParen]) -> None: - # pyre-fixme[41]: Cannot reassign final attribute `rpar`. - self.value.rpar = value - @add_slots @dataclass(frozen=True) @@ -3059,8 +2960,6 @@ class MatchSequence(MatchPattern, ABC): otherwise matches a fixed length sequence. """ - __slots__ = () - #: Patterns to be matched against the subject elements if it is a sequence. patterns: Sequence[Union[MatchSequenceElement, MatchStar]] @@ -3077,10 +2976,10 @@ class MatchList(MatchSequence): patterns: Sequence[Union[MatchSequenceElement, MatchStar]] #: An optional left bracket. If missing, this is an open sequence pattern. - lbracket: Optional[LeftSquareBracket] = None + lbracket: Optional[LeftSquareBracket] = LeftSquareBracket.field() #: An optional left bracket. If missing, this is an open sequence pattern. 
- rbracket: Optional[RightSquareBracket] = None + rbracket: Optional[RightSquareBracket] = RightSquareBracket.field() #: Parenthesis at the beginning of the node lpar: Sequence[LeftParen] = () @@ -3388,7 +3287,6 @@ class MatchClass(MatchPattern): whitespace_after_kwds=visit_required( self, "whitespace_after_kwds", self.whitespace_after_kwds, visitor ), - rpar=visit_sequence(self, "rpar", self.rpar, visitor), ) def _codegen_impl(self, state: CodegenState) -> None: @@ -3425,15 +3323,15 @@ class MatchAs(MatchPattern): #: Whitespace between ``pattern`` and the ``as`` keyword (if ``pattern`` is not #: ``None``) - whitespace_before_as: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_before_as: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT #: Whitespace between the ``as`` keyword and ``name`` (if ``pattern`` is not #: ``None``) - whitespace_after_as: Union[BaseParenthesizableWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) + whitespace_after_as: Union[ + BaseParenthesizableWhitespace, MaybeSentinel + ] = MaybeSentinel.DEFAULT #: Parenthesis at the beginning of the node lpar: Sequence[LeftParen] = () @@ -3476,13 +3374,6 @@ class MatchAs(MatchPattern): state.add_token(" ") elif isinstance(ws_after, BaseParenthesizableWhitespace): ws_after._codegen(state) - else: - ws_before = self.whitespace_before_as - if isinstance(ws_before, BaseParenthesizableWhitespace): - ws_before._codegen(state) - ws_after = self.whitespace_after_as - if isinstance(ws_after, BaseParenthesizableWhitespace): - ws_after._codegen(state) if name is None: state.add_token("_") else: @@ -3548,326 +3439,3 @@ class MatchOr(MatchPattern): pats = self.patterns for idx, pat in enumerate(pats): pat._codegen(state, default_separator=idx + 1 < len(pats)) - - -@add_slots -@dataclass(frozen=True) -class TypeVar(CSTNode): - """ - A simple (non-variadic) type variable. - - Note: this node represents type a variable when declared using PEP-695 syntax. - """ - - #: The name of the type variable. - name: Name - - #: An optional bound on the type. - bound: Optional[BaseExpression] = None - - #: The colon used to separate the name and bound. If not specified, - #: :class:`MaybeSentinel` will be replaced with a colon if there is a bound, - #: otherwise will be left empty. - colon: Union[Colon, MaybeSentinel] = MaybeSentinel.DEFAULT - - def _codegen_impl(self, state: CodegenState) -> None: - with state.record_syntactic_position(self): - self.name._codegen(state) - bound = self.bound - colon = self.colon - if not isinstance(colon, MaybeSentinel): - colon._codegen(state) - else: - if bound is not None: - state.add_token(": ") - - if bound is not None: - bound._codegen(state) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeVar": - return TypeVar( - name=visit_required(self, "name", self.name, visitor), - colon=visit_sentinel(self, "colon", self.colon, visitor), - bound=visit_optional(self, "bound", self.bound, visitor), - ) - - -@add_slots -@dataclass(frozen=True) -class TypeVarTuple(CSTNode): - """ - A variadic type variable. - """ - - #: The name of this type variable. - name: Name - - #: The (optional) whitespace between the star declaring this type variable as - #: variadic, and the variable's name. 
- whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") - - def _codegen_impl(self, state: CodegenState) -> None: - with state.record_syntactic_position(self): - state.add_token("*") - self.whitespace_after_star._codegen(state) - self.name._codegen(state) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeVarTuple": - return TypeVarTuple( - name=visit_required(self, "name", self.name, visitor), - whitespace_after_star=visit_required( - self, "whitespace_after_star", self.whitespace_after_star, visitor - ), - ) - - -@add_slots -@dataclass(frozen=True) -class ParamSpec(CSTNode): - """ - A parameter specification. - - Note: this node represents a parameter specification when declared using PEP-695 - syntax. - """ - - #: The name of this parameter specification. - name: Name - - #: The (optional) whitespace between the double star declaring this type variable as - #: a parameter specification, and the name. - whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") - - def _codegen_impl(self, state: CodegenState) -> None: - with state.record_syntactic_position(self): - state.add_token("**") - self.whitespace_after_star._codegen(state) - self.name._codegen(state) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "ParamSpec": - return ParamSpec( - name=visit_required(self, "name", self.name, visitor), - whitespace_after_star=visit_required( - self, "whitespace_after_star", self.whitespace_after_star, visitor - ), - ) - - -@add_slots -@dataclass(frozen=True) -class TypeParam(CSTNode): - """ - A single type parameter that is contained in a :class:`TypeParameters` list. - """ - - #: The actual parameter. - param: Union[TypeVar, TypeVarTuple, ParamSpec] - - #: A trailing comma. If one is not provided, :class:`MaybeSentinel` will be replaced - #: with a comma only if a comma is required. - comma: Union[Comma, MaybeSentinel] = MaybeSentinel.DEFAULT - - #: The equal sign used to denote assignment if there is a default. - equal: Union[AssignEqual, MaybeSentinel] = MaybeSentinel.DEFAULT - - #: The star used to denote a variadic default - star: Literal["", "*"] = "" - - #: The whitespace between the star and the type. - whitespace_after_star: SimpleWhitespace = SimpleWhitespace.field("") - - #: Any optional default value, used when the argument is not supplied. 
- default: Optional[BaseExpression] = None - - def _codegen_impl(self, state: CodegenState, default_comma: bool = False) -> None: - self.param._codegen(state) - - equal = self.equal - if equal is MaybeSentinel.DEFAULT and self.default is not None: - state.add_token(" = ") - elif isinstance(equal, AssignEqual): - equal._codegen(state) - - state.add_token(self.star) - self.whitespace_after_star._codegen(state) - - default = self.default - if default is not None: - default._codegen(state) - - comma = self.comma - if isinstance(comma, MaybeSentinel): - if default_comma: - state.add_token(", ") - else: - comma._codegen(state) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeParam": - ret = TypeParam( - param=visit_required(self, "param", self.param, visitor), - equal=visit_sentinel(self, "equal", self.equal, visitor), - star=self.star, - whitespace_after_star=visit_required( - self, "whitespace_after_star", self.whitespace_after_star, visitor - ), - default=visit_optional(self, "default", self.default, visitor), - comma=visit_sentinel(self, "comma", self.comma, visitor), - ) - return ret - - def _validate(self) -> None: - if self.default is None and isinstance(self.equal, AssignEqual): - raise CSTValidationError( - "Must have a default when specifying an AssignEqual." - ) - if self.star and not (self.default or isinstance(self.equal, AssignEqual)): - raise CSTValidationError("Star can only be present if a default") - if isinstance(self.star, str) and self.star not in ("", "*"): - raise CSTValidationError("Must specify either '' or '*' for star.") - - -@add_slots -@dataclass(frozen=True) -class TypeParameters(CSTNode): - """ - Type parameters when specified with PEP-695 syntax. - - This node captures all specified parameters that are enclosed with square brackets. - """ - - #: The parameters within the square brackets. - params: Sequence[TypeParam] = () - - #: Opening square bracket that marks the start of these parameters. - lbracket: LeftSquareBracket = LeftSquareBracket.field() - #: Closing square bracket that marks the end of these parameters. - rbracket: RightSquareBracket = RightSquareBracket.field() - - def _codegen_impl(self, state: CodegenState) -> None: - self.lbracket._codegen(state) - params_len = len(self.params) - for idx, param in enumerate(self.params): - param._codegen(state, default_comma=idx + 1 < params_len) - self.rbracket._codegen(state) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeParameters": - return TypeParameters( - lbracket=visit_required(self, "lbracket", self.lbracket, visitor), - params=visit_sequence(self, "params", self.params, visitor), - rbracket=visit_required(self, "rbracket", self.rbracket, visitor), - ) - - -@add_slots -@dataclass(frozen=True) -class TypeAlias(BaseSmallStatement): - """ - A type alias statement. - - This node represents the ``type`` statement as specified initially by PEP-695. - Example: ``type ListOrSet[T] = list[T] | set[T]``. - """ - - #: The name being introduced in this statement. - name: Name - - #: Everything on the right hand side of the ``=``. - value: BaseExpression - - #: An optional list of type parameters, specified after the name. - type_parameters: Optional[TypeParameters] = None - - #: Whitespace between the ``type`` soft keyword and the name. - whitespace_after_type: SimpleWhitespace = SimpleWhitespace.field(" ") - - #: Whitespace between the name and the type parameters (if they exist) or the ``=``. 
- #: If not specified, :class:`MaybeSentinel` will be replaced with a single space if - #: there are no type parameters, otherwise no spaces. - whitespace_after_name: Union[SimpleWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) - - #: Whitespace between the type parameters and the ``=``. Always empty if there are - #: no type parameters. If not specified, :class:`MaybeSentinel` will be replaced - #: with a single space if there are type parameters. - whitespace_after_type_parameters: Union[SimpleWhitespace, MaybeSentinel] = ( - MaybeSentinel.DEFAULT - ) - - #: Whitespace between the ``=`` and the value. - whitespace_after_equals: SimpleWhitespace = SimpleWhitespace.field(" ") - - #: Optional semicolon when this is used in a statement line. This semicolon - #: owns the whitespace on both sides of it when it is used. - semicolon: Union[Semicolon, MaybeSentinel] = MaybeSentinel.DEFAULT - - def _validate(self) -> None: - if ( - self.type_parameters is None - and self.whitespace_after_type_parameters - not in { - SimpleWhitespace(""), - MaybeSentinel.DEFAULT, - } - ): - raise CSTValidationError( - "whitespace_after_type_parameters must be empty when there are no type parameters in a TypeAlias" - ) - - def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TypeAlias": - return TypeAlias( - whitespace_after_type=visit_required( - self, "whitespace_after_type", self.whitespace_after_type, visitor - ), - name=visit_required(self, "name", self.name, visitor), - whitespace_after_name=visit_sentinel( - self, "whitespace_after_name", self.whitespace_after_name, visitor - ), - type_parameters=visit_optional( - self, "type_parameters", self.type_parameters, visitor - ), - whitespace_after_type_parameters=visit_sentinel( - self, - "whitespace_after_type_parameters", - self.whitespace_after_type_parameters, - visitor, - ), - whitespace_after_equals=visit_required( - self, "whitespace_after_equals", self.whitespace_after_equals, visitor - ), - value=visit_required(self, "value", self.value, visitor), - semicolon=visit_sentinel(self, "semicolon", self.semicolon, visitor), - ) - - def _codegen_impl( - self, state: CodegenState, default_semicolon: bool = False - ) -> None: - with state.record_syntactic_position(self): - state.add_token("type") - self.whitespace_after_type._codegen(state) - self.name._codegen(state) - ws_after_name = self.whitespace_after_name - if isinstance(ws_after_name, MaybeSentinel): - if self.type_parameters is None: - state.add_token(" ") - else: - ws_after_name._codegen(state) - - ws_after_type_params = self.whitespace_after_type_parameters - if self.type_parameters is not None: - self.type_parameters._codegen(state) - if isinstance(ws_after_type_params, MaybeSentinel): - state.add_token(" ") - else: - ws_after_type_params._codegen(state) - - state.add_token("=") - self.whitespace_after_equals._codegen(state) - self.value._codegen(state) - - semi = self.semicolon - if isinstance(semi, MaybeSentinel): - if default_semicolon: - state.add_token("; ") - else: - semi._codegen(state) diff --git a/libcst/_nodes/tests/base.py b/libcst/_nodes/tests/base.py index 65e7059d..8245e301 100644 --- a/libcst/_nodes/tests/base.py +++ b/libcst/_nodes/tests/base.py @@ -239,7 +239,7 @@ class CSTNodeTest(UnitTest): def assert_parses( self, code: str, - parser: Callable[[str], cst.CSTNode], + parser: Callable[[str], cst.BaseExpression], expect_success: bool, ) -> None: if not expect_success: diff --git a/libcst/_nodes/tests/test_atom.py b/libcst/_nodes/tests/test_atom.py index 
a33732c2..c2e31608 100644 --- a/libcst/_nodes/tests/test_atom.py +++ b/libcst/_nodes/tests/test_atom.py @@ -9,6 +9,7 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -739,69 +740,6 @@ class AtomTest(CSTNodeTest): "parser": parse_expression, "expected_position": None, }, - # Unpacked tuple - { - "node": cst.FormattedString( - parts=[ - cst.FormattedStringExpression( - expression=cst.Tuple( - elements=[ - cst.Element( - value=cst.Name( - value="a", - ), - comma=cst.Comma( - whitespace_before=cst.SimpleWhitespace( - value="", - ), - whitespace_after=cst.SimpleWhitespace( - value=" ", - ), - ), - ), - cst.Element( - value=cst.Name( - value="b", - ), - ), - ], - lpar=[], - rpar=[], - ), - ), - ], - start="f'", - end="'", - ), - "code": "f'{a, b}'", - "parser": parse_expression, - "expected_position": None, - }, - # Conditional expression - { - "node": cst.FormattedString( - parts=[ - cst.FormattedStringExpression( - expression=cst.IfExp( - test=cst.Name( - value="b", - ), - body=cst.Name( - value="a", - ), - orelse=cst.Name( - value="c", - ), - ), - ), - ], - start="f'", - end="'", - ), - "code": "f'{a if b else c}'", - "parser": parse_expression, - "expected_position": None, - }, # Concatenated strings { "node": cst.ConcatenatedString( @@ -1183,7 +1121,7 @@ class AtomTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_await.py b/libcst/_nodes/tests/test_await.py index 1d52642b..8aa408a9 100644 --- a/libcst/_nodes/tests/test_await.py +++ b/libcst/_nodes/tests/test_await.py @@ -46,14 +46,6 @@ class AwaitTest(CSTNodeTest): ), "expected_position": CodeRange((1, 2), (1, 13)), }, - # Whitespace after await - { - "node": cst.Await( - cst.Name("foo", lpar=[cst.LeftParen()], rpar=[cst.RightParen()]), - whitespace_after_await=cst.SimpleWhitespace(""), - ), - "code": "await(foo)", - }, ) ) def test_valid_py37(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_binary_op.py b/libcst/_nodes/tests/test_binary_op.py index f6b40daf..b6ac8b09 100644 --- a/libcst/_nodes/tests/test_binary_op.py +++ b/libcst/_nodes/tests/test_binary_op.py @@ -174,18 +174,3 @@ class BinaryOperationTest(CSTNodeTest): ) def test_invalid(self, **kwargs: Any) -> None: self.assert_invalid(**kwargs) - - @data_provider( - ( - { - "code": '"a"' * 6000, - "parser": parse_expression, - }, - { - "code": "[_" + " for _ in _" * 6000 + "]", - "parser": parse_expression, - }, - ) - ) - def test_parse_error(self, **kwargs: Any) -> None: - self.assert_parses(**kwargs, expect_success=False) diff --git a/libcst/_nodes/tests/test_classdef.py b/libcst/_nodes/tests/test_classdef.py index 2e026a6c..db582dce 100644 --- a/libcst/_nodes/tests/test_classdef.py +++ b/libcst/_nodes/tests/test_classdef.py @@ -112,105 +112,6 @@ class ClassDefCreationTest(CSTNodeTest): def test_valid(self, **kwargs: Any) -> None: self.validate_node(**kwargs) - @data_provider( - ( - { - "node": cst.ClassDef( - cst.Name("Foo"), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - ( - cst.TypeParam( - cst.TypeVar( - cst.Name("T"), - 
bound=cst.Name("int"), - colon=cst.Colon( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), - ), - cst.TypeParam( - cst.TypeVarTuple(cst.Name("Ts")), - cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), - ), - cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), - ) - ), - ), - "code": "class Foo[T: int, *Ts, **KW]: pass\n", - }, - { - "node": cst.ClassDef( - cst.Name("Foo"), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - params=( - cst.TypeParam( - param=cst.TypeVar( - cst.Name("T"), - bound=cst.Name("str"), - colon=cst.Colon( - whitespace_before=cst.SimpleWhitespace(" "), - whitespace_after=cst.ParenthesizedWhitespace( - empty_lines=(cst.EmptyLine(),), - indent=True, - ), - ), - ), - comma=cst.Comma(cst.SimpleWhitespace(" ")), - ), - cst.TypeParam( - cst.ParamSpec( - cst.Name("PS"), cst.SimpleWhitespace(" ") - ), - cst.Comma(cst.SimpleWhitespace(" ")), - ), - ) - ), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - ), - "code": "class Foo[T :\n\nstr ,** PS ,] : pass\n", - }, - { - "node": cst.ClassDef( - cst.Name("Foo"), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - params=( - cst.TypeParam( - param=cst.TypeVar( - cst.Name("T"), - bound=cst.Name("str"), - colon=cst.Colon( - whitespace_before=cst.SimpleWhitespace(" "), - whitespace_after=cst.ParenthesizedWhitespace( - empty_lines=(cst.EmptyLine(),), - indent=True, - ), - ), - ), - comma=cst.Comma(cst.SimpleWhitespace(" ")), - ), - cst.TypeParam( - cst.ParamSpec( - cst.Name("PS"), cst.SimpleWhitespace(" ") - ), - cst.Comma(cst.SimpleWhitespace(" ")), - ), - ) - ), - lpar=cst.LeftParen(), - rpar=cst.RightParen(), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - ), - "code": "class Foo[T :\n\nstr ,** PS ,] (): pass\n", - }, - ) - ) - def test_valid_native(self, **kwargs: Any) -> None: - self.validate_node(**kwargs) - @data_provider( ( # Basic parenthesis tests. 
diff --git a/libcst/_nodes/tests/test_dict.py b/libcst/_nodes/tests/test_dict.py index 47cb0663..1ee33332 100644 --- a/libcst/_nodes/tests/test_dict.py +++ b/libcst/_nodes/tests/test_dict.py @@ -8,6 +8,7 @@ from typing import Any import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -187,6 +188,6 @@ class DictTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_dict_comp.py b/libcst/_nodes/tests/test_dict_comp.py index a753375f..a9970f9d 100644 --- a/libcst/_nodes/tests/test_dict_comp.py +++ b/libcst/_nodes/tests/test_dict_comp.py @@ -26,17 +26,6 @@ class DictCompTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 0), (1, 17)), }, - # non-trivial keys & values in DictComp - { - "node": cst.DictComp( - cst.BinaryOperation(cst.Name("k1"), cst.Add(), cst.Name("k2")), - cst.BinaryOperation(cst.Name("v1"), cst.Add(), cst.Name("v2")), - cst.CompFor(target=cst.Name("a"), iter=cst.Name("b")), - ), - "code": "{k1 + k2: v1 + v2 for a in b}", - "parser": parse_expression, - "expected_position": CodeRange((1, 0), (1, 29)), - }, # custom whitespace around colon { "node": cst.DictComp( diff --git a/libcst/_nodes/tests/test_funcdef.py b/libcst/_nodes/tests/test_funcdef.py index 4ed7fcc3..5d5a80ac 100644 --- a/libcst/_nodes/tests/test_funcdef.py +++ b/libcst/_nodes/tests/test_funcdef.py @@ -8,6 +8,7 @@ from typing import Any, Callable import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -622,46 +623,6 @@ class FunctionDefCreationTest(CSTNodeTest): "code": "@ bar ( )\n", "expected_position": CodeRange((1, 0), (1, 10)), }, - # Allow nested calls on decorator - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - (cst.Decorator(cst.Call(func=cst.Call(func=cst.Name("bar")))),), - ), - "code": "@bar()()\ndef foo(): pass\n", - }, - # Allow any expression in decorator - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - ( - cst.Decorator( - cst.BinaryOperation(cst.Name("a"), cst.Add(), cst.Name("b")) - ), - ), - ), - "code": "@a + b\ndef foo(): pass\n", - }, - # Allow parentheses around decorator - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - ( - cst.Decorator( - cst.Name( - "bar", lpar=(cst.LeftParen(),), rpar=(cst.RightParen(),) - ) - ), - ), - ), - "code": "@(bar)\ndef foo(): pass\n", - }, # Parameters { "node": cst.Parameters( @@ -740,154 +701,6 @@ class FunctionDefCreationTest(CSTNodeTest): ) ) def test_valid(self, **kwargs: Any) -> None: - if "native_only" in kwargs: - kwargs.pop("native_only") - self.validate_node(**kwargs) - - @data_provider( - ( - # PEP 646 - { - "node": cst.FunctionDef( - name=cst.Name(value="foo"), - params=cst.Parameters( - params=[], - star_arg=cst.Param( - star="*", - 
name=cst.Name("a"), - annotation=cst.Annotation( - cst.StarredElement(value=cst.Name("b")), - whitespace_before_indicator=cst.SimpleWhitespace(""), - ), - ), - ), - body=cst.SimpleStatementSuite((cst.Pass(),)), - ), - "parser": parse_statement, - "code": "def foo(*a: *b): pass\n", - }, - { - "node": cst.FunctionDef( - name=cst.Name(value="foo"), - params=cst.Parameters( - params=[], - star_arg=cst.Param( - star="*", - name=cst.Name("a"), - annotation=cst.Annotation( - cst.StarredElement( - value=cst.Subscript( - value=cst.Name("tuple"), - slice=[ - cst.SubscriptElement( - cst.Index(cst.Name("int")), - comma=cst.Comma(), - ), - cst.SubscriptElement( - cst.Index( - value=cst.Name("Ts"), - star="*", - whitespace_after_star=cst.SimpleWhitespace( - "" - ), - ), - comma=cst.Comma(), - ), - cst.SubscriptElement( - cst.Index(cst.Ellipsis()) - ), - ], - ) - ), - whitespace_before_indicator=cst.SimpleWhitespace(""), - ), - ), - ), - body=cst.SimpleStatementSuite((cst.Pass(),)), - ), - "parser": parse_statement, - "code": "def foo(*a: *tuple[int,*Ts,...]): pass\n", - }, - # Single type variable - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - (cst.TypeParam(cst.TypeVar(cst.Name("T"))),) - ), - ), - "code": "def foo[T](): pass\n", - "parser": parse_statement, - }, - # All the type parameters - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - ( - cst.TypeParam( - cst.TypeVar( - cst.Name("T"), - bound=cst.Name("int"), - colon=cst.Colon( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), - ), - cst.TypeParam( - cst.TypeVarTuple(cst.Name("Ts")), - cst.Comma(whitespace_after=cst.SimpleWhitespace(" ")), - ), - cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), - ) - ), - ), - "code": "def foo[T: int, *Ts, **KW](): pass\n", - "parser": parse_statement, - }, - # Type parameters with whitespace - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters(), - cst.SimpleStatementSuite((cst.Pass(),)), - type_parameters=cst.TypeParameters( - params=( - cst.TypeParam( - param=cst.TypeVar( - cst.Name("T"), - bound=cst.Name("str"), - colon=cst.Colon( - whitespace_before=cst.SimpleWhitespace(" "), - whitespace_after=cst.ParenthesizedWhitespace( - empty_lines=(cst.EmptyLine(),), - indent=True, - ), - ), - ), - comma=cst.Comma(cst.SimpleWhitespace(" ")), - ), - cst.TypeParam( - cst.ParamSpec( - cst.Name("PS"), cst.SimpleWhitespace(" ") - ), - cst.Comma(cst.SimpleWhitespace(" ")), - ), - ) - ), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - ), - "code": "def foo[T :\n\nstr ,** PS ,] (): pass\n", - "parser": parse_statement, - }, - ) - ) - def test_valid_native(self, **kwargs: Any) -> None: self.validate_node(**kwargs) @data_provider( @@ -1034,6 +847,22 @@ class FunctionDefCreationTest(CSTNodeTest): ), r"Expecting a star prefix of '\*\*'", ), + # Validate decorator name semantics + ( + lambda: cst.FunctionDef( + cst.Name("foo"), + cst.Parameters(), + cst.SimpleStatementSuite((cst.Pass(),)), + ( + cst.Decorator( + cst.Name( + "bar", lpar=(cst.LeftParen(),), rpar=(cst.RightParen(),) + ) + ), + ), + ), + "Cannot have parens around decorator in a Decorator", + ), ) ) def test_invalid( @@ -1047,9 +876,7 @@ def _parse_statement_force_38(code: str) -> cst.BaseCompoundStatement: code, config=cst.PartialParserConfig(python_version="3.8") ) if not 
isinstance(statement, cst.BaseCompoundStatement): - raise ValueError( - "This function is expecting to parse compound statements only!" - ) + raise Exception("This function is expecting to parse compound statements only!") return statement @@ -1972,36 +1799,6 @@ class FunctionDefParserTest(CSTNodeTest): ), "code": "def foo(bar, baz, /): pass\n", }, - # Positional only params with whitespace after but no comma - { - "node": cst.FunctionDef( - cst.Name("foo"), - cst.Parameters( - posonly_params=( - cst.Param( - cst.Name("bar"), - star="", - comma=cst.Comma( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.Param( - cst.Name("baz"), - star="", - comma=cst.Comma( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - ), - posonly_ind=cst.ParamSlash( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.SimpleStatementSuite((cst.Pass(),)), - ), - "code": "def foo(bar, baz, / ): pass\n", - "native_only": True, - }, # Typed positional only params { "node": cst.FunctionDef( @@ -2217,7 +2014,7 @@ class FunctionDefParserTest(CSTNodeTest): }, ) ) - def test_valid_38(self, node: cst.CSTNode, code: str, **kwargs: Any) -> None: + def test_valid_38(self, node: cst.CSTNode, code: str) -> None: self.validate_node(node, code, _parse_statement_force_38) @data_provider( @@ -2245,23 +2042,6 @@ class FunctionDefParserTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) - - @data_provider( - ( - {"code": "A[:*b]"}, - {"code": "A[*b:]"}, - {"code": "A[*b:*b]"}, - {"code": "A[*(1:2)]"}, - {"code": "A[*:]"}, - {"code": "A[:*]"}, - {"code": "A[**b]"}, - {"code": "def f(x: *b): pass"}, - {"code": "def f(**x: *b): pass"}, - {"code": "x: *b"}, - ) - ) - def test_parse_error(self, **kwargs: Any) -> None: - self.assert_parses(**kwargs, expect_success=False, parser=parse_statement) diff --git a/libcst/_nodes/tests/test_if.py b/libcst/_nodes/tests/test_if.py index 7615614e..e6e4c5ae 100644 --- a/libcst/_nodes/tests/test_if.py +++ b/libcst/_nodes/tests/test_if.py @@ -3,7 +3,7 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from typing import Any, Callable +from typing import Any import libcst as cst from libcst import parse_statement @@ -129,21 +129,3 @@ class IfTest(CSTNodeTest): ) def test_valid(self, **kwargs: Any) -> None: self.validate_node(**kwargs) - - @data_provider( - ( - # Validate whitespace handling - ( - lambda: cst.If( - cst.Name("conditional"), - cst.SimpleStatementSuite((cst.Pass(),)), - whitespace_before_test=cst.SimpleWhitespace(""), - ), - "Must have at least one space after 'if' keyword.", - ), - ) - ) - def test_invalid( - self, get_node: Callable[[], cst.CSTNode], expected_re: str - ) -> None: - self.assert_invalid(get_node, expected_re) diff --git a/libcst/_nodes/tests/test_ifexp.py b/libcst/_nodes/tests/test_ifexp.py index dd260ef3..e00924b1 100644 --- a/libcst/_nodes/tests/test_ifexp.py +++ b/libcst/_nodes/tests/test_ifexp.py @@ -52,41 +52,6 @@ class IfExpTest(CSTNodeTest): "(foo)if(bar)else(baz)", CodeRange((1, 0), (1, 21)), ), - ( - cst.IfExp( - body=cst.Name("foo"), - whitespace_before_if=cst.SimpleWhitespace(" "), - whitespace_after_if=cst.SimpleWhitespace(" "), - test=cst.Name("bar"), - whitespace_before_else=cst.SimpleWhitespace(" "), - whitespace_after_else=cst.SimpleWhitespace(""), - orelse=cst.IfExp( - body=cst.SimpleString("''"), - whitespace_before_if=cst.SimpleWhitespace(""), - test=cst.Name("bar"), - orelse=cst.Name("baz"), - ), - ), - "foo if bar else''if bar else baz", - CodeRange((1, 0), (1, 32)), - ), - ( - cst.GeneratorExp( - elt=cst.IfExp( - body=cst.Name("foo"), - test=cst.Name("bar"), - orelse=cst.SimpleString("''"), - whitespace_after_else=cst.SimpleWhitespace(""), - ), - for_in=cst.CompFor( - target=cst.Name("_"), - iter=cst.Name("_"), - whitespace_before=cst.SimpleWhitespace(""), - ), - ), - "(foo if bar else''for _ in _)", - CodeRange((1, 1), (1, 28)), - ), # Make sure that spacing works ( cst.IfExp( diff --git a/libcst/_nodes/tests/test_import.py b/libcst/_nodes/tests/test_import.py index d6ad8cbe..eedfcdaf 100644 --- a/libcst/_nodes/tests/test_import.py +++ b/libcst/_nodes/tests/test_import.py @@ -195,20 +195,6 @@ class ImportCreateTest(CSTNodeTest): ), "expected_re": "at least one space", }, - { - "get_node": lambda: cst.Import( - names=( - cst.ImportAlias( - cst.Name("foo"), - asname=cst.AsName( - cst.Name("bar"), - whitespace_before_as=cst.SimpleWhitespace(""), - ), - ), - ), - ), - "expected_re": "at least one space", - }, { "get_node": lambda: cst.Import( names=[ @@ -578,25 +564,6 @@ class ImportFromCreateTest(CSTNodeTest): ), "expected_re": "one space after import", }, - { - "get_node": lambda: cst.ImportFrom( - module=cst.Name("foo"), - names=( - cst.ImportAlias( - cst.Name("bar"), - asname=cst.AsName( - cst.Name( - "baz", - lpar=(cst.LeftParen(),), - rpar=(cst.RightParen(),), - ), - whitespace_before_as=cst.SimpleWhitespace(""), - ), - ), - ), - ), - "expected_re": "one space before as keyword", - }, ) ) def test_invalid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_lambda.py b/libcst/_nodes/tests/test_lambda.py index 64a561ed..82c1b675 100644 --- a/libcst/_nodes/tests/test_lambda.py +++ b/libcst/_nodes/tests/test_lambda.py @@ -30,22 +30,6 @@ class LambdaCreationTest(CSTNodeTest): ), "code": "lambda bar, baz, /: 5", }, - # Test basic positional only params with extra trailing whitespace - { - "node": cst.Lambda( - cst.Parameters( - posonly_params=( - cst.Param(cst.Name("bar")), - cst.Param(cst.Name("baz")), - ), - posonly_ind=cst.ParamSlash( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.Integer("5"), - ), - 
"code": "lambda bar, baz, / : 5", - }, # Test basic positional params ( cst.Lambda( @@ -303,6 +287,30 @@ class LambdaCreationTest(CSTNodeTest): ), "at least one space after lambda", ), + ( + lambda: cst.Lambda( + cst.Parameters(star_arg=cst.Param(cst.Name("arg"))), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "at least one space after lambda", + ), + ( + lambda: cst.Lambda( + cst.Parameters(kwonly_params=(cst.Param(cst.Name("arg")),)), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "at least one space after lambda", + ), + ( + lambda: cst.Lambda( + cst.Parameters(star_kwarg=cst.Param(cst.Name("arg"))), + cst.Integer("5"), + whitespace_after_lambda=cst.SimpleWhitespace(""), + ), + "at least one space after lambda", + ), ( lambda: cst.Lambda( cst.Parameters( @@ -920,53 +928,6 @@ class LambdaParserTest(CSTNodeTest): ), "( lambda : 5 )", ), - # No space between lambda and params - ( - cst.Lambda( - cst.Parameters(star_arg=cst.Param(cst.Name("args"), star="*")), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "lambda*args: 5", - ), - ( - cst.Lambda( - cst.Parameters(star_kwarg=cst.Param(cst.Name("kwargs"), star="**")), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "lambda**kwargs: 5", - ), - ( - cst.Lambda( - cst.Parameters( - star_arg=cst.ParamStar( - comma=cst.Comma( - cst.SimpleWhitespace(""), cst.SimpleWhitespace("") - ) - ), - kwonly_params=[cst.Param(cst.Name("args"), star="")], - ), - cst.Integer("5"), - whitespace_after_lambda=cst.SimpleWhitespace(""), - ), - "lambda*,args: 5", - ), - ( - cst.ListComp( - elt=cst.Lambda( - params=cst.Parameters(), - body=cst.Tuple(()), - colon=cst.Colon(), - ), - for_in=cst.CompFor( - target=cst.Name("_"), - iter=cst.Name("_"), - whitespace_before=cst.SimpleWhitespace(""), - ), - ), - "[lambda:()for _ in _]", - ), ) ) def test_valid( diff --git a/libcst/_nodes/tests/test_list.py b/libcst/_nodes/tests/test_list.py index 2f96124c..9096ced6 100644 --- a/libcst/_nodes/tests/test_list.py +++ b/libcst/_nodes/tests/test_list.py @@ -8,11 +8,13 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider class ListTest(CSTNodeTest): + # A lot of Element/StarredElement tests are provided by the tests for Tuple, so we # we don't need to duplicate them here. @data_provider( @@ -125,6 +127,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_match.py b/libcst/_nodes/tests/test_match.py index 2335b7c3..edf51d84 100644 --- a/libcst/_nodes/tests/test_match.py +++ b/libcst/_nodes/tests/test_match.py @@ -1,16 +1,19 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
-from typing import Any, Callable +from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider -parser: Callable[[str], cst.CSTNode] = parse_statement +parser: Optional[Callable[[str], cst.CSTNode]] = ( + parse_statement if is_native() else None +) class MatchTest(CSTNodeTest): @@ -36,39 +39,6 @@ class MatchTest(CSTNodeTest): + ' case "foo": pass\n', "parser": parser, }, - # Parenthesized value - { - "node": cst.Match( - subject=cst.Name( - value="x", - ), - cases=[ - cst.MatchCase( - pattern=cst.MatchAs( - pattern=cst.MatchValue( - value=cst.Integer( - value="1", - lpar=[ - cst.LeftParen(), - ], - rpar=[ - cst.RightParen(), - ], - ), - ), - name=cst.Name( - value="z", - ), - whitespace_before_as=cst.SimpleWhitespace(" "), - whitespace_after_as=cst.SimpleWhitespace(" "), - ), - body=cst.SimpleStatementSuite([cst.Pass()]), - ), - ], - ), - "code": "match x:\n case (1) as z: pass\n", - "parser": parser, - }, # List patterns { "node": cst.Match( @@ -455,34 +425,6 @@ class MatchTest(CSTNodeTest): + " case None | False | True: pass\n", "parser": None, }, - # Match without whitespace between keyword and the expr - { - "node": cst.Match( - subject=cst.Name( - "x", lpar=[cst.LeftParen()], rpar=[cst.RightParen()] - ), - cases=[ - cst.MatchCase( - pattern=cst.MatchSingleton( - cst.Name( - "None", - lpar=[cst.LeftParen()], - rpar=[cst.RightParen()], - ) - ), - body=cst.SimpleStatementSuite((cst.Pass(),)), - whitespace_after_case=cst.SimpleWhitespace( - value="", - ), - ), - ], - whitespace_after_match=cst.SimpleWhitespace( - value="", - ), - ), - "code": "match(x):\n case(None): pass\n", - "parser": parser, - }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_matrix_multiply.py b/libcst/_nodes/tests/test_matrix_multiply.py index 500b7aab..5b4b8668 100644 --- a/libcst/_nodes/tests/test_matrix_multiply.py +++ b/libcst/_nodes/tests/test_matrix_multiply.py @@ -11,6 +11,7 @@ from libcst._nodes.tests.base import ( parse_expression_as, parse_statement_as, ) +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider @@ -69,6 +70,6 @@ class NamedExprTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_module.py b/libcst/_nodes/tests/test_module.py index 40de8f8e..5b33c6b7 100644 --- a/libcst/_nodes/tests/test_module.py +++ b/libcst/_nodes/tests/test_module.py @@ -8,7 +8,7 @@ from typing import cast, Tuple import libcst as cst from libcst import parse_module, parse_statement from libcst._nodes.tests.base import CSTNodeTest - +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange, MetadataWrapper, PositionProvider from libcst.testing.utils import data_provider @@ -117,7 +117,7 @@ class ModuleTest(CSTNodeTest): def test_parser( self, *, code: str, expected: cst.Module, enabled_for_native: bool = True ) -> None: - if not enabled_for_native: + if is_native() and not enabled_for_native: self.skipTest("Disabled for native parser") self.assertEqual(parse_module(code), expected) diff --git a/libcst/_nodes/tests/test_namedexpr.py b/libcst/_nodes/tests/test_namedexpr.py index 
6ebcf978..f24045ca 100644 --- a/libcst/_nodes/tests/test_namedexpr.py +++ b/libcst/_nodes/tests/test_namedexpr.py @@ -22,9 +22,7 @@ def _parse_statement_force_38(code: str) -> cst.BaseCompoundStatement: code, config=cst.PartialParserConfig(python_version="3.8") ) if not isinstance(statement, cst.BaseCompoundStatement): - raise ValueError( - "This function is expecting to parse compound statements only!" - ) + raise Exception("This function is expecting to parse compound statements only!") return statement @@ -168,22 +166,6 @@ class NamedExprTest(CSTNodeTest): "parser": _parse_expression_force_38, "expected_position": None, }, - { - "node": cst.ListComp( - elt=cst.NamedExpr( - cst.Name("_"), - cst.SimpleString("''"), - whitespace_after_walrus=cst.SimpleWhitespace(""), - whitespace_before_walrus=cst.SimpleWhitespace(""), - ), - for_in=cst.CompFor( - target=cst.Name("_"), - iter=cst.Name("_"), - whitespace_before=cst.SimpleWhitespace(""), - ), - ), - "code": "[_:=''for _ in _]", - }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_removal_behavior.py b/libcst/_nodes/tests/test_removal_behavior.py index 709b26f5..9b1bf619 100644 --- a/libcst/_nodes/tests/test_removal_behavior.py +++ b/libcst/_nodes/tests/test_removal_behavior.py @@ -95,7 +95,7 @@ class RemovalBehavior(CSTNodeTest): self, before: str, after: str, visitor: Type[CSTTransformer] ) -> None: if before.endswith("\n") or after.endswith("\n"): - raise ValueError("Test cases should not be newline-terminated!") + raise Exception("Test cases should not be newline-terminated!") # Test doesn't have newline termination case before_module = parse_module(before) diff --git a/libcst/_nodes/tests/test_set.py b/libcst/_nodes/tests/test_set.py index 699b458a..bdf84955 100644 --- a/libcst/_nodes/tests/test_set.py +++ b/libcst/_nodes/tests/test_set.py @@ -8,10 +8,12 @@ from typing import Any, Callable import libcst as cst from libcst import parse_expression from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider class ListTest(CSTNodeTest): + # A lot of Element/StarredElement tests are provided by the tests for Tuple, so we # we don't need to duplicate them here. 
@data_provider( @@ -132,6 +134,6 @@ class ListTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_simple_comp.py b/libcst/_nodes/tests/test_simple_comp.py index 33ba4164..4de0c0a9 100644 --- a/libcst/_nodes/tests/test_simple_comp.py +++ b/libcst/_nodes/tests/test_simple_comp.py @@ -41,33 +41,6 @@ class SimpleCompTest(CSTNodeTest): "code": "{a for b in c}", "parser": parse_expression, }, - # non-trivial elt in GeneratorExp - { - "node": cst.GeneratorExp( - cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), - cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), - ), - "code": "(a1 + a2 for b in c)", - "parser": parse_expression, - }, - # non-trivial elt in ListComp - { - "node": cst.ListComp( - cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), - cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), - ), - "code": "[a1 + a2 for b in c]", - "parser": parse_expression, - }, - # non-trivial elt in SetComp - { - "node": cst.SetComp( - cst.BinaryOperation(cst.Name("a1"), cst.Add(), cst.Name("a2")), - cst.CompFor(target=cst.Name("b"), iter=cst.Name("c")), - ), - "code": "{a1 + a2 for b in c}", - "parser": parse_expression, - }, # async GeneratorExp { "node": cst.GeneratorExp( diff --git a/libcst/_nodes/tests/test_simple_string.py b/libcst/_nodes/tests/test_simple_string.py deleted file mode 100644 index d9abec50..00000000 --- a/libcst/_nodes/tests/test_simple_string.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import unittest - -import libcst as cst - - -class TestSimpleString(unittest.TestCase): - def test_quote(self) -> None: - test_cases = [ - ('"a"', '"'), - ("'b'", "'"), - ('""', '"'), - ("''", "'"), - ('"""c"""', '"""'), - ("'''d'''", "'''"), - ('""""e"""', '"""'), - ("''''f'''", "'''"), - ('"""""g"""', '"""'), - ("'''''h'''", "'''"), - ('""""""', '"""'), - ("''''''", "'''"), - ] - - for s, expected_quote in test_cases: - simple_string = cst.SimpleString(s) - actual = simple_string.quote - self.assertEqual(expected_quote, actual) diff --git a/libcst/_nodes/tests/test_template_strings.py b/libcst/_nodes/tests/test_template_strings.py deleted file mode 100644 index 6e4c308a..00000000 --- a/libcst/_nodes/tests/test_template_strings.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- -from typing import Callable, Optional - -import libcst as cst -from libcst import parse_expression -from libcst._nodes.tests.base import CSTNodeTest -from libcst.metadata import CodeRange -from libcst.testing.utils import data_provider - - -class TemplatedStringTest(CSTNodeTest): - @data_provider( - ( - # Simple t-string with only text - ( - cst.TemplatedString( - parts=(cst.TemplatedStringText("hello world"),), - ), - 't"hello world"', - True, - ), - # t-string with one expression - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringText("hello "), - cst.TemplatedStringExpression( - expression=cst.Name("name"), - ), - ), - ), - 't"hello {name}"', - True, - ), - # t-string with multiple expressions - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringText("a="), - cst.TemplatedStringExpression(expression=cst.Name("a")), - cst.TemplatedStringText(", b="), - cst.TemplatedStringExpression(expression=cst.Name("b")), - ), - ), - 't"a={a}, b={b}"', - True, - CodeRange((1, 0), (1, 15)), - ), - # t-string with nested expression - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringText("sum="), - cst.TemplatedStringExpression( - expression=cst.BinaryOperation( - left=cst.Name("a"), - operator=cst.Add(), - right=cst.Name("b"), - ) - ), - ), - ), - 't"sum={a + b}"', - True, - ), - # t-string with spacing in expression - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringText("x = "), - cst.TemplatedStringExpression( - whitespace_before_expression=cst.SimpleWhitespace(" "), - expression=cst.Name("x"), - whitespace_after_expression=cst.SimpleWhitespace(" "), - ), - ), - ), - 't"x = { x }"', - True, - ), - # t-string with escaped braces - ( - cst.TemplatedString( - parts=(cst.TemplatedStringText("{{foo}}"),), - ), - 't"{{foo}}"', - True, - ), - # t-string with only an expression - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringExpression(expression=cst.Name("value")), - ), - ), - 't"{value}"', - True, - ), - # t-string with whitespace and newlines - ( - cst.TemplatedString( - parts=( - cst.TemplatedStringText("line1\\n"), - cst.TemplatedStringExpression(expression=cst.Name("x")), - cst.TemplatedStringText("\\nline2"), - ), - ), - 't"line1\\n{x}\\nline2"', - True, - ), - # t-string with parenthesis (not typical, but test node construction) - ( - cst.TemplatedString( - lpar=(cst.LeftParen(),), - parts=(cst.TemplatedStringText("foo"),), - rpar=(cst.RightParen(),), - ), - '(t"foo")', - True, - ), - # t-string with whitespace in delimiters - ( - cst.TemplatedString( - lpar=(cst.LeftParen(whitespace_after=cst.SimpleWhitespace(" ")),), - parts=(cst.TemplatedStringText("foo"),), - rpar=(cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")),), - ), - '( t"foo" )', - True, - ), - # Test TemplatedStringText and TemplatedStringExpression individually - ( - cst.TemplatedStringText("abc"), - "abc", - False, - CodeRange((1, 0), (1, 3)), - ), - ( - cst.TemplatedStringExpression(expression=cst.Name("foo")), - "{foo}", - False, - CodeRange((1, 0), (1, 5)), - ), - ) - ) - def test_valid( - self, - node: cst.CSTNode, - code: str, - check_parsing: bool, - position: Optional[CodeRange] = None, - ) -> None: - if check_parsing: - self.validate_node(node, code, parse_expression, expected_position=position) - else: - self.validate_node(node, code, expected_position=position) - - @data_provider( - ( - ( - lambda: cst.TemplatedString( - parts=(cst.TemplatedStringText("foo"),), - lpar=(cst.LeftParen(),), - ), - "left paren without right paren", - ), - ( - lambda: cst.TemplatedString( - 
parts=(cst.TemplatedStringText("foo"),), - rpar=(cst.RightParen(),), - ), - "right paren without left paren", - ), - ) - ) - def test_invalid( - self, get_node: Callable[[], cst.CSTNode], expected_re: str - ) -> None: - self.assert_invalid(get_node, expected_re) diff --git a/libcst/_nodes/tests/test_try.py b/libcst/_nodes/tests/test_try.py index c5ae2462..c04a9db2 100644 --- a/libcst/_nodes/tests/test_try.py +++ b/libcst/_nodes/tests/test_try.py @@ -3,15 +3,18 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any, Callable +from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider -native_parse_statement: Callable[[str], cst.CSTNode] = parse_statement +native_parse_statement: Optional[Callable[[str], cst.CSTNode]] = ( + parse_statement if is_native() else None +) class TryTest(CSTNodeTest): @@ -326,52 +329,6 @@ class TryTest(CSTNodeTest): "code": "try: pass\nexcept(IOError, ImportError): pass\n", "parser": parse_statement, }, - # No space before as - { - "node": cst.Try( - cst.SimpleStatementSuite((cst.Pass(),)), - handlers=[ - cst.ExceptHandler( - cst.SimpleStatementSuite((cst.Pass(),)), - whitespace_after_except=cst.SimpleWhitespace(" "), - type=cst.Call(cst.Name("foo")), - name=cst.AsName( - whitespace_before_as=cst.SimpleWhitespace(""), - name=cst.Name("bar"), - ), - ) - ], - ), - "code": "try: pass\nexcept foo()as bar: pass\n", - }, - # PEP758 - Multiple exceptions with no parentheses - { - "node": cst.Try( - cst.SimpleStatementSuite((cst.Pass(),)), - handlers=[ - cst.ExceptHandler( - cst.SimpleStatementSuite((cst.Pass(),)), - type=cst.Tuple( - elements=[ - cst.Element( - value=cst.Name( - value="ValueError", - ), - ), - cst.Element( - value=cst.Name( - value="RuntimeError", - ), - ), - ], - lpar=[], - rpar=[], - ), - ) - ], - ), - "code": "try: pass\nexcept ValueError, RuntimeError: pass\n", - }, ) ) def test_valid(self, **kwargs: Any) -> None: @@ -389,6 +346,12 @@ class TryTest(CSTNodeTest): ), "expected_re": "between 'as'", }, + { + "get_node": lambda: cst.AsName( + cst.Name("bla"), whitespace_before_as=cst.SimpleWhitespace("") + ), + "expected_re": "before 'as'", + }, { "get_node": lambda: cst.ExceptHandler( cst.SimpleStatementSuite((cst.Pass(),)), @@ -604,38 +567,6 @@ class TryStarTest(CSTNodeTest): "parser": native_parse_statement, "expected_position": CodeRange((1, 0), (5, 13)), }, - # PEP758 - Multiple exceptions with no parentheses - { - "node": cst.TryStar( - cst.SimpleStatementSuite((cst.Pass(),)), - handlers=[ - cst.ExceptStarHandler( - cst.SimpleStatementSuite((cst.Pass(),)), - type=cst.Tuple( - elements=[ - cst.Element( - value=cst.Name( - value="ValueError", - ), - comma=cst.Comma( - whitespace_after=cst.SimpleWhitespace(" ") - ), - ), - cst.Element( - value=cst.Name( - value="RuntimeError", - ), - ), - ], - lpar=[], - rpar=[], - ), - ) - ], - ), - "code": "try: pass\nexcept* ValueError, RuntimeError: pass\n", - "parser": native_parse_statement, - }, ) ) def test_valid(self, **kwargs: Any) -> None: diff --git a/libcst/_nodes/tests/test_tuple.py b/libcst/_nodes/tests/test_tuple.py index aa3d68bb..db9f2cce 100644 --- a/libcst/_nodes/tests/test_tuple.py +++ b/libcst/_nodes/tests/test_tuple.py @@ -8,6 +8,7 @@ from typing import Any, 
Callable import libcst as cst from libcst import parse_expression, parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -90,47 +91,6 @@ class TupleTest(CSTNodeTest): "parser": parse_expression, "expected_position": CodeRange((1, 1), (1, 11)), }, - # top-level two-element tuple, with one being starred - { - "node": cst.SimpleStatementLine( - body=[ - cst.Expr( - value=cst.Tuple( - [ - cst.Element(cst.Name("one"), comma=cst.Comma()), - cst.StarredElement(cst.Name("two")), - ], - lpar=[], - rpar=[], - ) - ) - ] - ), - "code": "one,*two\n", - "parser": parse_statement, - }, - # top-level three-element tuple, start/end is starred - { - "node": cst.SimpleStatementLine( - body=[ - cst.Expr( - value=cst.Tuple( - [ - cst.StarredElement( - cst.Name("one"), comma=cst.Comma() - ), - cst.Element(cst.Name("two"), comma=cst.Comma()), - cst.StarredElement(cst.Name("three")), - ], - lpar=[], - rpar=[], - ) - ) - ] - ), - "code": "*one,two,*three\n", - "parser": parse_statement, - }, # missing spaces around tuple, okay with parenthesis { "node": cst.For( @@ -285,6 +245,6 @@ class TupleTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_type_alias.py b/libcst/_nodes/tests/test_type_alias.py deleted file mode 100644 index 865135c1..00000000 --- a/libcst/_nodes/tests/test_type_alias.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- -from typing import Any - -import libcst as cst -from libcst import parse_statement -from libcst._nodes.tests.base import CSTNodeTest -from libcst.metadata import CodeRange -from libcst.testing.utils import data_provider - - -class TypeAliasCreationTest(CSTNodeTest): - @data_provider( - ( - { - "node": cst.TypeAlias( - cst.Name("foo"), - cst.Name("bar"), - ), - "code": "type foo = bar", - "expected_position": CodeRange((1, 0), (1, 14)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [cst.TypeParam(cst.TypeVar(cst.Name("T")))] - ), - value=cst.BinaryOperation( - cst.Name("bar"), cst.BitOr(), cst.Name("baz") - ), - ), - "code": "type foo[T] = bar | baz", - "expected_position": CodeRange((1, 0), (1, 23)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVar(cst.Name("T"), bound=cst.Name("str")) - ), - cst.TypeParam(cst.TypeVarTuple(cst.Name("Ts"))), - cst.TypeParam(cst.ParamSpec(cst.Name("KW"))), - ] - ), - value=cst.BinaryOperation( - cst.Name("bar"), cst.BitOr(), cst.Name("baz") - ), - ), - "code": "type foo[T: str, *Ts, **KW] = bar | baz", - "expected_position": CodeRange((1, 0), (1, 39)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVar(cst.Name("T")), default=cst.Name("str") - ), - ] - ), - value=cst.Name("bar"), - ), - "code": "type foo[T = str] = bar", - "expected_position": CodeRange((1, 0), (1, 23)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.ParamSpec(cst.Name("P")), - default=cst.List( - elements=[ - cst.Element(cst.Name("int")), - cst.Element(cst.Name("str")), - ] - ), - ), - ] - ), - value=cst.Name("bar"), - ), - "code": "type foo[**P = [int, str]] = bar", - "expected_position": CodeRange((1, 0), (1, 32)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVarTuple(cst.Name("T")), - equal=cst.AssignEqual(), - default=cst.Name("default"), - star="*", - ), - ] - ), - value=cst.Name("bar"), - ), - "code": "type foo[*T = *default] = bar", - "expected_position": CodeRange((1, 0), (1, 29)), - }, - { - "node": cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVarTuple(cst.Name("T")), - equal=cst.AssignEqual(), - default=cst.Name("default"), - star="*", - whitespace_after_star=cst.SimpleWhitespace(" "), - ), - ] - ), - value=cst.Name("bar"), - ), - "code": "type foo[*T = * default] = bar", - "expected_position": CodeRange((1, 0), (1, 31)), - }, - ) - ) - def test_valid(self, **kwargs: Any) -> None: - self.validate_node(**kwargs) - - -class TypeAliasParserTest(CSTNodeTest): - @data_provider( - ( - { - "node": cst.SimpleStatementLine( - [ - cst.TypeAlias( - cst.Name("foo"), - cst.Name("bar"), - whitespace_after_name=cst.SimpleWhitespace(" "), - ) - ] - ), - "code": "type foo = bar\n", - "parser": parse_statement, - }, - { - "node": cst.SimpleStatementLine( - [ - cst.TypeAlias( - cst.Name("foo"), - cst.Name("bar"), - type_parameters=cst.TypeParameters( - params=[ - cst.TypeParam( - cst.TypeVar( - cst.Name("T"), cst.Name("str"), cst.Colon() - ), - cst.Comma(), - ), - cst.TypeParam( - cst.ParamSpec( - cst.Name("KW"), - whitespace_after_star=cst.SimpleWhitespace( - " " - ), - ), - cst.Comma( - whitespace_before=cst.SimpleWhitespace(" "), - whitespace_after=cst.SimpleWhitespace(" "), - ), - ), - ], - 
rbracket=cst.RightSquareBracket( - cst.SimpleWhitespace("") - ), - ), - whitespace_after_name=cst.SimpleWhitespace(" "), - whitespace_after_type=cst.SimpleWhitespace(" "), - whitespace_after_equals=cst.SimpleWhitespace(" "), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - semicolon=cst.Semicolon( - whitespace_before=cst.SimpleWhitespace(" "), - whitespace_after=cst.SimpleWhitespace(" "), - ), - ) - ] - ), - "code": "type foo [T:str,** KW , ] = bar ; \n", - "parser": parse_statement, - }, - { - "node": cst.SimpleStatementLine( - [ - cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVarTuple(cst.Name("P")), - star="*", - equal=cst.AssignEqual(), - default=cst.Name("default"), - ), - ] - ), - value=cst.Name("bar"), - whitespace_after_name=cst.SimpleWhitespace(" "), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - ) - ] - ), - "code": "type foo [*P = *default] = bar\n", - "parser": parse_statement, - }, - { - "node": cst.SimpleStatementLine( - [ - cst.TypeAlias( - cst.Name("foo"), - type_parameters=cst.TypeParameters( - [ - cst.TypeParam( - cst.TypeVarTuple(cst.Name("P")), - star="*", - whitespace_after_star=cst.SimpleWhitespace( - " " - ), - equal=cst.AssignEqual(), - default=cst.Name("default"), - ), - ] - ), - value=cst.Name("bar"), - whitespace_after_name=cst.SimpleWhitespace(" "), - whitespace_after_type_parameters=cst.SimpleWhitespace(" "), - ) - ] - ), - "code": "type foo [*P = * default] = bar\n", - "parser": parse_statement, - }, - ) - ) - def test_valid(self, **kwargs: Any) -> None: - self.validate_node(**kwargs) diff --git a/libcst/_nodes/tests/test_with.py b/libcst/_nodes/tests/test_with.py index 0b396619..1310b3f8 100644 --- a/libcst/_nodes/tests/test_with.py +++ b/libcst/_nodes/tests/test_with.py @@ -7,7 +7,9 @@ from typing import Any import libcst as cst from libcst import parse_statement, PartialParserConfig +from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -100,23 +102,6 @@ class WithTest(CSTNodeTest): "code": "with context_mgr() as ctx: pass\n", "parser": parse_statement, }, - { - "node": cst.With( - ( - cst.WithItem( - cst.Call(cst.Name("context_mgr")), - cst.AsName( - cst.Tuple(()), - whitespace_after_as=cst.SimpleWhitespace(""), - whitespace_before_as=cst.SimpleWhitespace(""), - ), - ), - ), - cst.SimpleStatementSuite((cst.Pass(),)), - ), - "code": "with context_mgr()as(): pass\n", - "parser": parse_statement, - }, # indentation { "node": DummyIndentedBlock( @@ -185,14 +170,14 @@ class WithTest(CSTNodeTest): cst.WithItem( cst.Call( cst.Name("context_mgr"), - lpar=(), - rpar=(), + lpar=() if is_native() else (cst.LeftParen(),), + rpar=() if is_native() else (cst.RightParen(),), ) ), ), cst.SimpleStatementSuite((cst.Pass(),)), - lpar=(cst.LeftParen()), - rpar=(cst.RightParen()), + lpar=(cst.LeftParen() if is_native() else MaybeSentinel.DEFAULT), + rpar=(cst.RightParen() if is_native() else MaybeSentinel.DEFAULT), whitespace_after_with=cst.SimpleWhitespace(""), ), "code": "with(context_mgr()): pass\n", @@ -231,7 +216,7 @@ class WithTest(CSTNodeTest): rpar=cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")), ), "code": ("with ( foo(),\n" " bar(), ): pass\n"), # noqa - "parser": parse_statement, + "parser": parse_statement if is_native() else None, 
"expected_position": CodeRange((1, 0), (2, 21)), }, ) @@ -308,7 +293,7 @@ class WithTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/tests/test_yield.py b/libcst/_nodes/tests/test_yield.py index e5085b4d..22a18872 100644 --- a/libcst/_nodes/tests/test_yield.py +++ b/libcst/_nodes/tests/test_yield.py @@ -8,6 +8,7 @@ from typing import Any, Callable, Optional import libcst as cst from libcst import parse_statement from libcst._nodes.tests.base import CSTNodeTest, parse_statement_as +from libcst._parser.entrypoints import is_native from libcst.helpers import ensure_type from libcst.metadata import CodeRange from libcst.testing.utils import data_provider @@ -240,6 +241,6 @@ class YieldParsingTest(CSTNodeTest): ) ) def test_versions(self, **kwargs: Any) -> None: - if not kwargs.get("expect_success", True): + if is_native() and not kwargs.get("expect_success", True): self.skipTest("parse errors are disabled for native parser") self.assert_parses(**kwargs) diff --git a/libcst/_nodes/whitespace.py b/libcst/_nodes/whitespace.py index b1332c13..686c14fb 100644 --- a/libcst/_nodes/whitespace.py +++ b/libcst/_nodes/whitespace.py @@ -48,8 +48,6 @@ class BaseParenthesizableWhitespace(CSTNode, ABC): ``iftest``), it has some semantic value. """ - __slots__ = () - # TODO: Should we somehow differentiate places where we require non-zero whitespace # with a separate type? diff --git a/libcst/_parser/_parsing_check.py b/libcst/_parser/_parsing_check.py deleted file mode 100644 index 03283c95..00000000 --- a/libcst/_parser/_parsing_check.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from typing import Iterable, Union - -from libcst._exceptions import EOFSentinel -from libcst._parser.parso.pgen2.generator import ReservedString -from libcst._parser.parso.python.token import PythonTokenTypes, TokenType -from libcst._parser.types.token import Token - -_EOF_STR: str = "end of file (EOF)" -_INDENT_STR: str = "an indent" -_DEDENT_STR: str = "a dedent" - - -def get_expected_str( - encountered: Union[Token, EOFSentinel], - expected: Union[Iterable[Union[TokenType, ReservedString]], EOFSentinel], -) -> str: - if ( - isinstance(encountered, EOFSentinel) - or encountered.type is PythonTokenTypes.ENDMARKER - ): - encountered_str = _EOF_STR - elif encountered.type is PythonTokenTypes.INDENT: - encountered_str = _INDENT_STR - elif encountered.type is PythonTokenTypes.DEDENT: - encountered_str = _DEDENT_STR - else: - encountered_str = repr(encountered.string) - - if isinstance(expected, EOFSentinel): - expected_names = [_EOF_STR] - else: - expected_names = sorted( - [ - repr(el.name) if isinstance(el, TokenType) else repr(el.value) - for el in expected - ] - ) - - if len(expected_names) > 10: - # There's too many possibilities, so it's probably not useful to list them. - # Instead, let's just abbreviate the message. - return f"Unexpectedly encountered {encountered_str}." - else: - if len(expected_names) == 1: - expected_str = expected_names[0] - else: - expected_str = f"{', '.join(expected_names[:-1])}, or {expected_names[-1]}" - return f"Encountered {encountered_str}, but expected {expected_str}." 
diff --git a/libcst/_parser/base_parser.py b/libcst/_parser/base_parser.py index d349bb14..ef9e1519 100644 --- a/libcst/_parser/base_parser.py +++ b/libcst/_parser/base_parser.py @@ -26,8 +26,12 @@ from dataclasses import dataclass, field from typing import Generic, Iterable, List, Sequence, TypeVar, Union -from libcst._exceptions import EOFSentinel, ParserSyntaxError, PartialParserSyntaxError -from libcst._parser._parsing_check import get_expected_str +from libcst._exceptions import ( + EOFSentinel, + get_expected_str, + ParserSyntaxError, + PartialParserSyntaxError, +) from libcst._parser.parso.pgen2.generator import DFAState, Grammar, ReservedString from libcst._parser.parso.python.token import TokenType from libcst._parser.types.token import Token @@ -99,7 +103,7 @@ class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]): def parse(self) -> _NodeT: # Ensure that we don't re-use parsers. if self.__was_parse_called: - raise ValueError("Each parser object may only be used to parse once.") + raise Exception("Each parser object may only be used to parse once.") self.__was_parse_called = True for token in self.tokens: @@ -125,9 +129,11 @@ class BaseParser(Generic[_TokenT, _TokenTypeT, _NodeT]): def convert_nonterminal( self, nonterminal: str, children: Sequence[_NodeT] - ) -> _NodeT: ... + ) -> _NodeT: + ... - def convert_terminal(self, token: _TokenT) -> _NodeT: ... + def convert_terminal(self, token: _TokenT) -> _NodeT: + ... def _add_token(self, token: _TokenT) -> None: """ diff --git a/libcst/_parser/conversions/expression.py b/libcst/_parser/conversions/expression.py index 79d7ad78..a3ba90ac 100644 --- a/libcst/_parser/conversions/expression.py +++ b/libcst/_parser/conversions/expression.py @@ -12,8 +12,7 @@ from tokenize import ( Intnumber as INTNUMBER_RE, ) -from libcst import CSTLogicError -from libcst._exceptions import ParserSyntaxError, PartialParserSyntaxError +from libcst._exceptions import PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( Arg, @@ -328,12 +327,7 @@ def convert_boolop( # Convert all of the operations that have no precedence in a loop for op, rightexpr in grouper(rightexprs, 2): if op.string not in BOOLOP_TOKEN_LUT: - raise ParserSyntaxError( - f"Unexpected token '{op.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Unexpected token '{op.string}'!") leftexpr = BooleanOperation( left=leftexpr, # pyre-ignore Pyre thinks that the type of the LUT is CSTNode. 
@@ -426,12 +420,7 @@ def convert_comp_op( ) else: # this should be unreachable - raise ParserSyntaxError( - f"Unexpected token '{op.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Unexpected token '{op.string}'!") else: # A two-token comparison leftcomp, rightcomp = children @@ -462,12 +451,7 @@ def convert_comp_op( ) else: # this should be unreachable - raise ParserSyntaxError( - f"Unexpected token '{leftcomp.string} {rightcomp.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Unexpected token '{leftcomp.string} {rightcomp.string}'!") @with_production("star_expr", "'*' expr") @@ -509,12 +493,7 @@ def convert_binop( # Convert all of the operations that have no precedence in a loop for op, rightexpr in grouper(rightexprs, 2): if op.string not in BINOP_TOKEN_LUT: - raise ParserSyntaxError( - f"Unexpected token '{op.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Unexpected token '{op.string}'!") leftexpr = BinaryOperation( left=leftexpr, # pyre-ignore Pyre thinks that the type of the LUT is CSTNode. @@ -561,12 +540,7 @@ def convert_factor( ) ) else: - raise ParserSyntaxError( - f"Unexpected token '{op.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Unexpected token '{op.string}'!") return WithLeadingWhitespace( UnaryOperation(operator=opnode, expression=factor.value), op.whitespace_before @@ -677,7 +651,7 @@ def convert_atom_expr_trailer( ) else: # This is an invalid trailer, so lets give up - raise CSTLogicError() + raise Exception("Logic error!") return WithLeadingWhitespace(atom, whitespace_before) @@ -896,19 +870,9 @@ def convert_atom_basic( Imaginary(child.string), child.whitespace_before ) else: - raise ParserSyntaxError( - f"Unparseable number {child.string}", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception("Unparseable number {child.string}") else: - raise ParserSyntaxError( - f"Logic error, unexpected token {child.type.name}", - lines=config.lines, - raw_line=0, - raw_column=0, - ) + raise Exception(f"Logic error, unexpected token {child.type.name}") @with_production("atom_squarebrackets", "'[' [testlist_comp_list] ']'") @@ -1483,7 +1447,7 @@ def convert_arg_assign_comp_for( if equal.string == ":=": val = convert_namedexpr_test(config, children) if not isinstance(val, WithLeadingWhitespace): - raise TypeError( + raise Exception( f"convert_namedexpr_test returned {val!r}, not WithLeadingWhitespace" ) return Arg(value=val.value) diff --git a/libcst/_parser/conversions/params.py b/libcst/_parser/conversions/params.py index 5b29f95d..9ac7f1d1 100644 --- a/libcst/_parser/conversions/params.py +++ b/libcst/_parser/conversions/params.py @@ -6,7 +6,6 @@ from typing import Any, List, Optional, Sequence, Union -from libcst import CSTLogicError from libcst._exceptions import PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( @@ -122,7 +121,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(*abc, *): ... # This should be unreachable, the grammar already disallows it. - raise ValueError( + raise Exception( "Cannot have multiple star ('*') markers in a single argument " + "list." ) @@ -137,7 +136,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(foo, /, *, /, bar): ... # This should be unreachable, the grammar already disallows it. 
- raise ValueError( + raise Exception( "Cannot have multiple slash ('/') markers in a single argument " + "list." ) @@ -169,7 +168,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(**kwargs, trailing=None) # This should be unreachable, the grammar already disallows it. - raise ValueError("Cannot have any arguments after a kwargs expansion.") + raise Exception("Cannot have any arguments after a kwargs expansion.") elif ( isinstance(param.star, str) and param.star == "*" and param.default is None ): @@ -182,7 +181,7 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(*first, *second): ... # This should be unreachable, the grammar already disallows it. - raise ValueError( + raise Exception( "Expected a keyword argument but found a starred positional " + "argument expansion." ) @@ -198,13 +197,13 @@ def convert_argslist( # noqa: C901 # Example code: # def fn(**first, **second) # This should be unreachable, the grammar already disallows it. - raise ValueError( + raise Exception( "Multiple starred keyword argument expansions are not allowed in a " + "single argument list" ) else: # The state machine should never end up here. - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") return current_param diff --git a/libcst/_parser/conversions/statement.py b/libcst/_parser/conversions/statement.py index f96c6ea2..608f002f 100644 --- a/libcst/_parser/conversions/statement.py +++ b/libcst/_parser/conversions/statement.py @@ -6,8 +6,7 @@ from typing import Any, Dict, List, Optional, Sequence, Tuple, Type -from libcst import CSTLogicError -from libcst._exceptions import ParserSyntaxError, PartialParserSyntaxError +from libcst._exceptions import PartialParserSyntaxError from libcst._maybe_sentinel import MaybeSentinel from libcst._nodes.expression import ( Annotation, @@ -284,9 +283,7 @@ def convert_annassign(config: ParserConfig, children: Sequence[Any]) -> Any: whitespace_after=parse_simple_whitespace(config, equal.whitespace_after), ) else: - raise ParserSyntaxError( - "Invalid parser state!", lines=config.lines, raw_line=0, raw_column=0 - ) + raise Exception("Invalid parser state!") return AnnAssignPartial( annotation=Annotation( @@ -322,13 +319,7 @@ def convert_annassign(config: ParserConfig, children: Sequence[Any]) -> Any: def convert_augassign(config: ParserConfig, children: Sequence[Any]) -> Any: op, expr = children if op.string not in AUGOP_TOKEN_LUT: - raise ParserSyntaxError( - f"Unexpected token '{op.string}'!", - lines=config.lines, - raw_line=0, - raw_column=0, - ) - + raise Exception(f"Unexpected token '{op.string}'!") return AugAssignPartial( # pyre-ignore Pyre seems to think that the value of this LUT is CSTNode operator=AUGOP_TOKEN_LUT[op.string]( @@ -456,7 +447,7 @@ def convert_import_relative(config: ParserConfig, children: Sequence[Any]) -> An # This should be the dotted name, and we can't get more than # one, but lets be sure anyway if dotted_name is not None: - raise CSTLogicError() + raise Exception("Logic error!") dotted_name = child return ImportRelativePartial(relative=tuple(dots), module=dotted_name) @@ -653,7 +644,7 @@ def convert_raise_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: item=source.value, ) else: - raise CSTLogicError() + raise Exception("Logic error!") return WithLeadingWhitespace( Raise(whitespace_after_raise=whitespace_after_raise, exc=exc, cause=cause), @@ -902,7 +893,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: if isinstance(clause, Token): if clause.string == "else": 
if orelse is not None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") orelse = Else( leading_lines=parse_empty_lines(config, clause.whitespace_before), whitespace_before_colon=parse_simple_whitespace( @@ -912,7 +903,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: ) elif clause.string == "finally": if finalbody is not None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") finalbody = Finally( leading_lines=parse_empty_lines(config, clause.whitespace_before), whitespace_before_colon=parse_simple_whitespace( @@ -921,7 +912,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: body=suite, ) else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") elif isinstance(clause, ExceptClausePartial): handlers.append( ExceptHandler( @@ -936,7 +927,7 @@ def convert_try_stmt(config: ParserConfig, children: Sequence[Any]) -> Any: ) ) else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") return Try( leading_lines=parse_empty_lines(config, trytoken.whitespace_before), @@ -1342,7 +1333,7 @@ def convert_asyncable_stmt(config: ParserConfig, children: Sequence[Any]) -> Any asynchronous=asyncnode, leading_lines=leading_lines ) else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") @with_production("suite", "simple_stmt_suite | indented_suite") diff --git a/libcst/_parser/entrypoints.py b/libcst/_parser/entrypoints.py index bab45ece..461433bd 100644 --- a/libcst/_parser/entrypoints.py +++ b/libcst/_parser/entrypoints.py @@ -9,6 +9,7 @@ parser. A parser entrypoint should take the source code and some configuration information """ +import os from functools import partial from typing import Union @@ -16,12 +17,19 @@ from libcst._nodes.base import CSTNode from libcst._nodes.expression import BaseExpression from libcst._nodes.module import Module from libcst._nodes.statement import BaseCompoundStatement, SimpleStatementLine -from libcst._parser.detect_config import convert_to_utf8 +from libcst._parser.detect_config import convert_to_utf8, detect_config +from libcst._parser.grammar import get_grammar, validate_grammar +from libcst._parser.python_parser import PythonCSTParser from libcst._parser.types.config import PartialParserConfig _DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig() +def is_native() -> bool: + typ = os.environ.get("LIBCST_PARSER_TYPE", None) + return typ == "native" + + def _parse( entrypoint: str, source: Union[str, bytes], @@ -30,21 +38,57 @@ def _parse( detect_trailing_newline: bool, detect_default_newline: bool, ) -> CSTNode: + if is_native(): + from libcst.native import parse_expression, parse_module, parse_statement - encoding, source_str = convert_to_utf8(source, partial=config) + encoding, source_str = convert_to_utf8(source, partial=config) - from libcst import native + if entrypoint == "file_input": + parse = partial(parse_module, encoding=encoding) + elif entrypoint == "stmt_input": + parse = parse_statement + elif entrypoint == "expression_input": + parse = parse_expression + else: + raise ValueError(f"Unknown parser entry point: {entrypoint}") - if entrypoint == "file_input": - parse = partial(native.parse_module, encoding=encoding) - elif entrypoint == "stmt_input": - parse = native.parse_statement - elif entrypoint == "expression_input": - parse = native.parse_expression - else: - raise ValueError(f"Unknown parser entry point: {entrypoint}") + return parse(source_str) + return 
_pure_python_parse( + entrypoint, + source, + config, + detect_trailing_newline=detect_trailing_newline, + detect_default_newline=detect_default_newline, + ) - return parse(source_str) + +def _pure_python_parse( + entrypoint: str, + source: Union[str, bytes], + config: PartialParserConfig, + *, + detect_trailing_newline: bool, + detect_default_newline: bool, +) -> CSTNode: + detection_result = detect_config( + source, + partial=config, + detect_trailing_newline=detect_trailing_newline, + detect_default_newline=detect_default_newline, + ) + validate_grammar() + grammar = get_grammar(config.parsed_python_version, config.future_imports) + + parser = PythonCSTParser( + tokens=detection_result.tokens, + config=detection_result.config, + pgen_grammar=grammar, + start_nonterminal=entrypoint, + ) + # The parser has an Any return type, we can at least refine it to CSTNode here. + result = parser.parse() + assert isinstance(result, CSTNode) + return result def parse_module( diff --git a/libcst/_parser/grammar.py b/libcst/_parser/grammar.py index ee65ef72..8e6ade59 100644 --- a/libcst/_parser/grammar.py +++ b/libcst/_parser/grammar.py @@ -319,7 +319,7 @@ def validate_grammar() -> None: production_name = fn_productions[0].name expected_name = f"convert_{production_name}" if fn.__name__ != expected_name: - raise ValueError( + raise Exception( f"The conversion function for '{production_name}' " + f"must be called '{expected_name}', not '{fn.__name__}'." ) @@ -330,7 +330,7 @@ def _get_version_comparison(version: str) -> Tuple[str, PythonVersionInfo]: return (version[:2], parse_version_string(version[2:].strip())) if version[:1] in (">", "<"): return (version[:1], parse_version_string(version[1:].strip())) - raise ValueError(f"Invalid version comparison specifier '{version}'") + raise Exception(f"Invalid version comparison specifier '{version}'") def _compare_versions( @@ -350,7 +350,7 @@ def _compare_versions( return actual_version > requested_version if comparison == "<": return actual_version < requested_version - raise ValueError(f"Invalid version comparison specifier '{comparison}'") + raise Exception(f"Invalid version comparison specifier '{comparison}'") def _should_include( @@ -405,7 +405,7 @@ def get_nonterminal_conversions( if not _should_include_future(fn_production.future, future_imports): continue if fn_production.name in conversions: - raise ValueError( + raise Exception( f"Found duplicate '{fn_production.name}' production in grammar" ) conversions[fn_production.name] = fn diff --git a/libcst/_parser/parso/pgen2/generator.py b/libcst/_parser/parso/pgen2/generator.py index 5e83741b..4e20e89d 100644 --- a/libcst/_parser/parso/pgen2/generator.py +++ b/libcst/_parser/parso/pgen2/generator.py @@ -72,9 +72,9 @@ class DFAState(Generic[_TokenTypeT]): def __init__(self, from_rule: str, nfa_set: Set[NFAState], final: NFAState) -> None: self.from_rule = from_rule self.nfa_set = nfa_set - self.arcs: Mapping[str, DFAState] = ( - {} - ) # map from terminals/nonterminals to DFAState + self.arcs: Mapping[ + str, DFAState + ] = {} # map from terminals/nonterminals to DFAState # In an intermediary step we set these nonterminal arcs (which has the # same structure as arcs). These don't contain terminals anymore. 
self.nonterminal_arcs: Mapping[str, DFAState] = {} @@ -259,7 +259,7 @@ def generate_grammar(bnf_grammar: str, token_namespace: Any) -> Grammar[Any]: _calculate_tree_traversal(rule_to_dfas) if start_nonterminal is None: - raise ValueError("could not find starting nonterminal!") + raise Exception("could not find starting nonterminal!") return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) diff --git a/libcst/_parser/parso/pgen2/grammar_parser.py b/libcst/_parser/parso/pgen2/grammar_parser.py index 0d30199d..5d0f2229 100644 --- a/libcst/_parser/parso/pgen2/grammar_parser.py +++ b/libcst/_parser/parso/pgen2/grammar_parser.py @@ -93,10 +93,14 @@ class GrammarParser: def _parse_items(self): # items: item+ a, b = self._parse_item() - while self.type in ( - PythonTokenTypes.NAME, - PythonTokenTypes.STRING, - ) or self.value in ("(", "["): + while ( + self.type + in ( + PythonTokenTypes.NAME, + PythonTokenTypes.STRING, + ) + or self.value in ("(", "[") + ): c, d = self._parse_item() # Need to end on the next item. b.add_arc(c) diff --git a/libcst/_parser/parso/python/token.py b/libcst/_parser/parso/python/token.py index 164262b9..de883719 100644 --- a/libcst/_parser/parso/python/token.py +++ b/libcst/_parser/parso/python/token.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. @@ -26,8 +26,9 @@ try: ERRORTOKEN: TokenType = native_token_type.ERRORTOKEN ERROR_DEDENT: TokenType = native_token_type.ERROR_DEDENT + except ImportError: - from libcst._parser.parso.python.py_token import ( # noqa: F401 + from libcst._parser.parso.python.py_token import ( # noqa F401 PythonTokenTypes, TokenType, ) diff --git a/libcst/_parser/parso/python/tokenize.py b/libcst/_parser/parso/python/tokenize.py index 711a8785..bfd159dd 100644 --- a/libcst/_parser/parso/python/tokenize.py +++ b/libcst/_parser/parso/python/tokenize.py @@ -36,7 +36,6 @@ from collections import namedtuple from dataclasses import dataclass from typing import Dict, Generator, Iterable, Optional, Pattern, Set, Tuple -from libcst import CSTLogicError from libcst._parser.parso.python.token import PythonTokenTypes from libcst._parser.parso.utils import PythonVersionInfo, split_lines @@ -523,14 +522,14 @@ def _tokenize_lines_py36_or_below( # noqa: C901 if contstr: # continued string if endprog is None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") endmatch = endprog.match(line) if endmatch: pos = endmatch.end(0) if contstr_start is None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") if stashed is not None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") yield PythonToken(STRING, contstr + line[:pos], contstr_start, prefix) contstr = "" contline = None @@ -548,7 +547,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 ) if string: if stashed is not None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") yield PythonToken( FSTRING_STRING, string, @@ -573,7 +572,7 @@ def _tokenize_lines_py36_or_below( # noqa: C901 pos += quote_length if fstring_end_token is not None: if stashed is not None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") yield fstring_end_token continue @@ -886,12 +885,12 @@ def _tokenize_lines_py37_or_above( # noqa: C901 if contstr: # continued string if endprog is None: - raise CSTLogicError("Logic 
error!") + raise Exception("Logic error!") endmatch = endprog.match(line) if endmatch: pos = endmatch.end(0) if contstr_start is None: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") yield PythonToken(STRING, contstr + line[:pos], contstr_start, prefix) contstr = "" contline = None diff --git a/libcst/_parser/parso/tests/test_utils.py b/libcst/_parser/parso/tests/test_utils.py index 1f548ef4..e033fe9d 100644 --- a/libcst/_parser/parso/tests/test_utils.py +++ b/libcst/_parser/parso/tests/test_utils.py @@ -39,8 +39,8 @@ class ParsoUtilsTest(UnitTest): # Invalid line breaks ("a\vb", ["a\vb"], False), ("a\vb", ["a\vb"], True), - ("\x1c", ["\x1c"], False), - ("\x1c", ["\x1c"], True), + ("\x1C", ["\x1C"], False), + ("\x1C", ["\x1C"], True), ) ) def test_split_lines(self, string, expected_result, keepends): diff --git a/libcst/_parser/parso/utils.py b/libcst/_parser/parso/utils.py index 54517123..0e4b4949 100644 --- a/libcst/_parser/parso/utils.py +++ b/libcst/_parser/parso/utils.py @@ -29,9 +29,9 @@ from typing import Optional, Sequence, Tuple, Union _NON_LINE_BREAKS = ( "\v", # Vertical Tabulation 0xB "\f", # Form Feed 0xC - "\x1c", # File Separator - "\x1d", # Group Separator - "\x1e", # Record Separator + "\x1C", # File Separator + "\x1D", # Group Separator + "\x1E", # Record Separator "\x85", # Next Line (NEL - Equivalent to CR+LF. # Used to mark end-of-line on some IBM mainframes.) "\u2028", # Line Separator @@ -114,11 +114,11 @@ def python_bytes_to_unicode( return b"utf-8" # pyre-ignore Pyre can't see that Union[str, bytes] conforms to AnyStr. - first_two_match = re.match(rb"(?:[^\n]*\n){0,2}", source) + first_two_match = re.match(br"(?:[^\n]*\n){0,2}", source) if first_two_match is None: return encoding first_two_lines = first_two_match.group(0) - possible_encoding = re.search(rb"coding[=:]\s*([-\w.]+)", first_two_lines) + possible_encoding = re.search(br"coding[=:]\s*([-\w.]+)", first_two_lines) if possible_encoding: return possible_encoding.group(1) else: diff --git a/libcst/_parser/production_decorator.py b/libcst/_parser/production_decorator.py index d5ba52de..41a817f8 100644 --- a/libcst/_parser/production_decorator.py +++ b/libcst/_parser/production_decorator.py @@ -39,7 +39,7 @@ def with_production( # pyre-ignore: Pyre doesn't think that fn has a __name__ attribute fn_name = fn.__name__ if not fn_name.startswith("convert_"): - raise ValueError( + raise Exception( "A function with a production must be named 'convert_X', not " + f"'{fn_name}'." ) diff --git a/libcst/_parser/py_whitespace_parser.py b/libcst/_parser/py_whitespace_parser.py index 6b6573a6..a3c53a97 100644 --- a/libcst/_parser/py_whitespace_parser.py +++ b/libcst/_parser/py_whitespace_parser.py @@ -1,11 +1,10 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import List, Optional, Sequence, Tuple, Union -from libcst import CSTLogicError, ParserSyntaxError from libcst._nodes.whitespace import ( Comment, COMMENT_RE, @@ -104,13 +103,10 @@ def parse_trailing_whitespace( ) -> TrailingWhitespace: trailing_whitespace = _parse_trailing_whitespace(config, state) if trailing_whitespace is None: - raise ParserSyntaxError( + raise Exception( "Internal Error: Failed to parse TrailingWhitespace. 
This should never " + "happen because a TrailingWhitespace is never optional in the grammar, " - + "so this error should've been caught by parso first.", - lines=config.lines, - raw_line=state.line, - raw_column=state.column, + + "so this error should've been caught by parso first." ) return trailing_whitespace @@ -181,9 +177,7 @@ def _parse_indent( if state.column == len(line_str) and state.line == len(config.lines): # We're at EOF, treat this as a failed speculative parse return False - raise CSTLogicError( - "Internal Error: Column should be 0 when parsing an indent." - ) + raise Exception("Internal Error: Column should be 0 when parsing an indent.") if line_str.startswith(absolute_indent, state.column): state.column += len(absolute_indent) return True @@ -212,12 +206,7 @@ def _parse_newline( newline_str = newline_match.group(0) state.column += len(newline_str) if state.column != len(line_str): - raise ParserSyntaxError( - "Internal Error: Found a newline, but it wasn't the EOL.", - lines=config.lines, - raw_line=state.line, - raw_column=state.column, - ) + raise Exception("Internal Error: Found a newline, but it wasn't the EOL.") if state.line < len(config.lines): # this newline was the end of a line, and there's another line, # therefore we should move to the next line diff --git a/libcst/_parser/tests/test_parse_errors.py b/libcst/_parser/tests/test_parse_errors.py index 7697893d..331dd81c 100644 --- a/libcst/_parser/tests/test_parse_errors.py +++ b/libcst/_parser/tests/test_parse_errors.py @@ -6,10 +6,9 @@ from textwrap import dedent from typing import Callable -from unittest.mock import patch import libcst as cst -from libcst._nodes.base import CSTValidationError +from libcst._parser.entrypoints import is_native from libcst.testing.utils import data_provider, UnitTest @@ -171,11 +170,5 @@ class ParseErrorsTest(UnitTest): ) -> None: with self.assertRaises(cst.ParserSyntaxError) as cm: parse_fn() - # make sure str() doesn't blow up - self.assertIn("Syntax Error", str(cm.exception)) - - def test_native_fallible_into_py(self) -> None: - with patch("libcst._nodes.expression.Name._validate") as await_validate: - await_validate.side_effect = CSTValidationError("validate is broken") - with self.assertRaises((SyntaxError, cst.ParserSyntaxError)): - cst.parse_module("foo") + if not is_native(): + self.assertEqual(str(cm.exception), expected) diff --git a/libcst/_parser/types/config.py b/libcst/_parser/types/config.py index 289fd8ae..1fc32371 100644 --- a/libcst/_parser/types/config.py +++ b/libcst/_parser/types/config.py @@ -27,9 +27,9 @@ except ImportError: BaseWhitespaceParserConfig = config_mod.BaseWhitespaceParserConfig ParserConfig = config_mod.ParserConfig -parser_config_asdict: Callable[[ParserConfig], Mapping[str, Any]] = ( - config_mod.parser_config_asdict -) +parser_config_asdict: Callable[ + [ParserConfig], Mapping[str, Any] +] = config_mod.parser_config_asdict class AutoConfig(Enum): diff --git a/libcst/_parser/types/py_config.py b/libcst/_parser/types/py_config.py index d7732591..cb91d558 100644 --- a/libcst/_parser/types/py_config.py +++ b/libcst/_parser/types/py_config.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
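The `entrypoints.py` and `test_parse_errors.py` hunks above restore the environment-variable switch between the pure Python parser and the native parser. A short usage sketch of that switch, assuming the native extension is installed; it uses only the public `parse_module` entry point and the `LIBCST_PARSER_TYPE` variable shown in the diff:

```python
import os

import libcst as cst
from libcst._parser.entrypoints import is_native

# is_native() is consulted on each parse, so the switch can be flipped at runtime.
os.environ["LIBCST_PARSER_TYPE"] = "native"
print(is_native())                    # True only when LIBCST_PARSER_TYPE == "native"

module = cst.parse_module("x = 1\n")  # dispatched to libcst.native.parse_module
print(module.code)                    # round-trips the original source

del os.environ["LIBCST_PARSER_TYPE"]
print(is_native())                    # False again: the pure Python parser is used
```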
diff --git a/libcst/_parser/types/py_token.py b/libcst/_parser/types/py_token.py index d2f9b537..7fac5eb8 100644 --- a/libcst/_parser/types/py_token.py +++ b/libcst/_parser/types/py_token.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/py_whitespace_state.py b/libcst/_parser/types/py_whitespace_state.py index 6359e83e..99047590 100644 --- a/libcst/_parser/types/py_whitespace_state.py +++ b/libcst/_parser/types/py_whitespace_state.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/libcst/_parser/types/token.py b/libcst/_parser/types/token.py index 54d904ef..32c85ccf 100644 --- a/libcst/_parser/types/token.py +++ b/libcst/_parser/types/token.py @@ -9,4 +9,4 @@ try: Token = tokenize.Token except ImportError: - from libcst._parser.types.py_token import Token # noqa: F401 + from libcst._parser.types.py_token import Token # noqa F401 diff --git a/libcst/_position.py b/libcst/_position.py index e81e9ab4..d7ba0d07 100644 --- a/libcst/_position.py +++ b/libcst/_position.py @@ -40,10 +40,12 @@ class CodeRange: end: CodePosition @overload - def __init__(self, start: CodePosition, end: CodePosition) -> None: ... + def __init__(self, start: CodePosition, end: CodePosition) -> None: + ... @overload - def __init__(self, start: Tuple[int, int], end: Tuple[int, int]) -> None: ... + def __init__(self, start: Tuple[int, int], end: Tuple[int, int]) -> None: + ... def __init__(self, start: _CodePositionT, end: _CodePositionT) -> None: if isinstance(start, tuple) and isinstance(end, tuple): diff --git a/libcst/_type_enforce.py b/libcst/_type_enforce.py index dded4525..3eb72e77 100644 --- a/libcst/_type_enforce.py +++ b/libcst/_type_enforce.py @@ -3,21 +3,16 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import ( - Any, - ClassVar, - ForwardRef, - get_args, - get_origin, - Iterable, - Literal, - Mapping, - MutableMapping, - MutableSequence, - Tuple, - TypeVar, - Union, -) +from typing import Any, Iterable, Mapping, MutableMapping, MutableSequence, Tuple + +from typing_extensions import Literal +from typing_inspect import get_args, get_origin, is_classvar, is_typevar, is_union_type + +try: # py37+ + from typing import ForwardRef +except ImportError: # py36 + # pyre-fixme[21]: Could not find name `_ForwardRef` in `typing` (stubbed). + from typing import _ForwardRef as ForwardRef def is_value_of_type( # noqa: C901 "too complex" @@ -51,11 +46,15 @@ def is_value_of_type( # noqa: C901 "too complex" - Forward Refs -- use `typing.get_type_hints` to resolve these - Type[...] 
""" - if expected_type is ClassVar or get_origin(expected_type) is ClassVar: - classvar_args = get_args(expected_type) - expected_type = (classvar_args[0] or Any) if classvar_args else Any + if is_classvar(expected_type): + # `ClassVar` (no subscript) is implicitly `ClassVar[Any]` + if hasattr(expected_type, "__type__"): # py36 + expected_type = expected_type.__type__ or Any + else: # py37+ + classvar_args = get_args(expected_type) + expected_type = (classvar_args[0] or Any) if classvar_args else Any - if type(expected_type) is TypeVar: + if is_typevar(expected_type): # treat this the same as Any # TODO: evaluate bounds return True @@ -65,13 +64,16 @@ def is_value_of_type( # noqa: C901 "too complex" if expected_origin_type == Any: return True - elif expected_type is Union or get_origin(expected_type) is Union: + elif is_union_type(expected_type): return any( is_value_of_type(value, subtype) for subtype in expected_type.__args__ ) elif isinstance(expected_origin_type, type(Literal)): - literal_values = get_args(expected_type) + if hasattr(expected_type, "__values__"): # py36 + literal_values = expected_type.__values__ + else: # py37+ + literal_values = get_args(expected_type, evaluate=True) return any(value == literal for literal in literal_values) elif isinstance(expected_origin_type, ForwardRef): @@ -85,11 +87,14 @@ def is_value_of_type( # noqa: C901 "too complex" if not isinstance(value, tuple): return False - type_args = get_args(expected_type) + type_args = get_args(expected_type, evaluate=True) if len(type_args) == 0: # `Tuple` (no subscript) is implicitly `Tuple[Any, ...]` return True + if type_args is None: + return True + if len(value) != len(type_args): return False # TODO: Handle `Tuple[T, ...]` like `Iterable[T]` @@ -106,7 +111,7 @@ def is_value_of_type( # noqa: C901 "too complex" if not issubclass(type(value), expected_origin_type): return False - type_args = get_args(expected_type) + type_args = get_args(expected_type, evaluate=True) if len(type_args) == 0: # `Mapping` (no subscript) is implicitly `Mapping[Any, Any]`. return True @@ -143,7 +148,7 @@ def is_value_of_type( # noqa: C901 "too complex" if not issubclass(type(value), expected_origin_type): return False - type_args = get_args(expected_type) + type_args = get_args(expected_type, evaluate=True) if len(type_args) == 0: # `Iterable` (no subscript) is implicitly `Iterable[Any]`. 
return True diff --git a/libcst/_typed_visitor.py b/libcst/_typed_visitor.py index 8816f619..a880bee4 100644 --- a/libcst/_typed_visitor.py +++ b/libcst/_typed_visitor.py @@ -25,7 +25,6 @@ if TYPE_CHECKING: BaseExpression, BaseFormattedStringContent, BaseSlice, - BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -72,9 +71,6 @@ if TYPE_CHECKING: StarredElement, Subscript, SubscriptElement, - TemplatedString, - TemplatedStringExpression, - TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -182,7 +178,6 @@ if TYPE_CHECKING: MatchValue, NameItem, Nonlocal, - ParamSpec, Pass, Raise, Return, @@ -190,11 +185,6 @@ if TYPE_CHECKING: SimpleStatementSuite, Try, TryStar, - TypeAlias, - TypeParam, - TypeParameters, - TypeVar, - TypeVarTuple, While, With, WithItem, @@ -211,7 +201,6 @@ if TYPE_CHECKING: class CSTTypedBaseFunctions: - @mark_no_op def visit_Add(self, node: "Add") -> Optional[bool]: pass @@ -1064,22 +1053,6 @@ class CSTTypedBaseFunctions: def leave_ClassDef_whitespace_before_colon(self, node: "ClassDef") -> None: pass - @mark_no_op - def visit_ClassDef_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def visit_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: - pass - - @mark_no_op - def leave_ClassDef_whitespace_after_type_parameters(self, node: "ClassDef") -> None: - pass - @mark_no_op def visit_Colon(self, node: "Colon") -> Optional[bool]: pass @@ -2366,26 +2339,6 @@ class CSTTypedBaseFunctions: def leave_FunctionDef_whitespace_before_colon(self, node: "FunctionDef") -> None: pass - @mark_no_op - def visit_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def leave_FunctionDef_type_parameters(self, node: "FunctionDef") -> None: - pass - - @mark_no_op - def visit_FunctionDef_whitespace_after_type_parameters( - self, node: "FunctionDef" - ) -> None: - pass - - @mark_no_op - def leave_FunctionDef_whitespace_after_type_parameters( - self, node: "FunctionDef" - ) -> None: - pass - @mark_no_op def visit_GeneratorExp(self, node: "GeneratorExp") -> Optional[bool]: pass @@ -2854,22 +2807,6 @@ class CSTTypedBaseFunctions: def leave_Index_value(self, node: "Index") -> None: pass - @mark_no_op - def visit_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_star(self, node: "Index") -> None: - pass - - @mark_no_op - def visit_Index_whitespace_after_star(self, node: "Index") -> None: - pass - - @mark_no_op - def leave_Index_whitespace_after_star(self, node: "Index") -> None: - pass - @mark_no_op def visit_Integer(self, node: "Integer") -> Optional[bool]: pass @@ -4354,34 +4291,6 @@ class CSTTypedBaseFunctions: def leave_ParamSlash_comma(self, node: "ParamSlash") -> None: pass - @mark_no_op - def visit_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def leave_ParamSlash_whitespace_after(self, node: "ParamSlash") -> None: - pass - - @mark_no_op - def visit_ParamSpec(self, node: "ParamSpec") -> Optional[bool]: - pass - - @mark_no_op - def visit_ParamSpec_name(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def leave_ParamSpec_name(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def visit_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: - pass - - @mark_no_op - def leave_ParamSpec_whitespace_after_star(self, node: "ParamSpec") -> None: - pass - @mark_no_op def visit_ParamStar(self, node: 
"ParamStar") -> Optional[bool]: pass @@ -5186,140 +5095,6 @@ class CSTTypedBaseFunctions: def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None: pass - @mark_no_op - def visit_TemplatedString(self, node: "TemplatedString") -> Optional[bool]: - pass - - @mark_no_op - def visit_TemplatedString_parts(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedString_parts(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def visit_TemplatedString_start(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedString_start(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def visit_TemplatedString_end(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedString_end(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def visit_TemplatedString_lpar(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedString_lpar(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def visit_TemplatedString_rpar(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedString_rpar(self, node: "TemplatedString") -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression( - self, node: "TemplatedStringExpression" - ) -> Optional[bool]: - pass - - @mark_no_op - def visit_TemplatedStringExpression_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression_conversion( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_conversion( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression_format_spec( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_format_spec( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression_whitespace_before_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_whitespace_before_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression_whitespace_after_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_whitespace_after_expression( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringExpression_equal( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression_equal( - self, node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def visit_TemplatedStringText(self, node: "TemplatedStringText") -> Optional[bool]: - pass - - @mark_no_op - def visit_TemplatedStringText_value(self, node: "TemplatedStringText") -> None: - pass - - @mark_no_op - def leave_TemplatedStringText_value(self, node: "TemplatedStringText") -> None: - pass - @mark_no_op def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]: pass @@ -5480,206 +5255,6 @@ class CSTTypedBaseFunctions: def leave_Tuple_rpar(self, node: "Tuple") -> None: pass - @mark_no_op - def visit_TypeAlias(self, node: "TypeAlias") -> 
Optional[bool]: - pass - - @mark_no_op - def visit_TypeAlias_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_value(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_value(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_type_parameters(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_type(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_name(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_type_parameters( - self, node: "TypeAlias" - ) -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_type_parameters( - self, node: "TypeAlias" - ) -> None: - pass - - @mark_no_op - def visit_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_whitespace_after_equals(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeAlias_semicolon(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeAlias_semicolon(self, node: "TypeAlias") -> None: - pass - - @mark_no_op - def visit_TypeParam(self, node: "TypeParam") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeParam_param(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_param(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_comma(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_comma(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_equal(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_equal(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_star(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_star(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_whitespace_after_star(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_whitespace_after_star(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParam_default(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParam_default(self, node: "TypeParam") -> None: - pass - - @mark_no_op - def visit_TypeParameters(self, node: "TypeParameters") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeParameters_params(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_params(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeParameters_lbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_lbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeParameters_rbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeParameters_rbracket(self, node: "TypeParameters") -> None: - pass - - @mark_no_op - def visit_TypeVar(self, node: "TypeVar") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeVar_name(self, node: "TypeVar") -> 
None: - pass - - @mark_no_op - def leave_TypeVar_name(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVar_bound(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVar_bound(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVar_colon(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVar_colon(self, node: "TypeVar") -> None: - pass - - @mark_no_op - def visit_TypeVarTuple(self, node: "TypeVarTuple") -> Optional[bool]: - pass - - @mark_no_op - def visit_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple_name(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def visit_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple_whitespace_after_star(self, node: "TypeVarTuple") -> None: - pass - @mark_no_op def visit_UnaryOperation(self, node: "UnaryOperation") -> Optional[bool]: pass @@ -5902,7 +5477,6 @@ class CSTTypedBaseFunctions: class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): - @mark_no_op def leave_Add(self, original_node: "Add") -> None: pass @@ -6405,10 +5979,6 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_ParamSlash(self, original_node: "ParamSlash") -> None: pass - @mark_no_op - def leave_ParamSpec(self, original_node: "ParamSpec") -> None: - pass - @mark_no_op def leave_ParamStar(self, original_node: "ParamStar") -> None: pass @@ -6523,20 +6093,6 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None: pass - @mark_no_op - def leave_TemplatedString(self, original_node: "TemplatedString") -> None: - pass - - @mark_no_op - def leave_TemplatedStringExpression( - self, original_node: "TemplatedStringExpression" - ) -> None: - pass - - @mark_no_op - def leave_TemplatedStringText(self, original_node: "TemplatedStringText") -> None: - pass - @mark_no_op def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None: pass @@ -6553,26 +6109,6 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): def leave_Tuple(self, original_node: "Tuple") -> None: pass - @mark_no_op - def leave_TypeAlias(self, original_node: "TypeAlias") -> None: - pass - - @mark_no_op - def leave_TypeParam(self, original_node: "TypeParam") -> None: - pass - - @mark_no_op - def leave_TypeParameters(self, original_node: "TypeParameters") -> None: - pass - - @mark_no_op - def leave_TypeVar(self, original_node: "TypeVar") -> None: - pass - - @mark_no_op - def leave_TypeVarTuple(self, original_node: "TypeVarTuple") -> None: - pass - @mark_no_op def leave_UnaryOperation(self, original_node: "UnaryOperation") -> None: pass @@ -6595,6 +6131,7 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions): class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): + pass @mark_no_op def leave_Add(self, original_node: "Add", updated_node: "Add") -> "BaseBinaryOp": @@ -7372,12 +6909,6 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> Union["ParamSlash", MaybeSentinel]: return updated_node - @mark_no_op - def leave_ParamSpec( - self, original_node: "ParamSpec", updated_node: "ParamSpec" - ) -> "ParamSpec": - return updated_node - @mark_no_op def leave_ParamStar( self, original_node: "ParamStar", updated_node: "ParamStar" @@ -7525,7 +7056,7 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): @mark_no_op def leave_StarredElement( self, original_node: "StarredElement", updated_node: 
"StarredElement" - ) -> "BaseExpression": + ) -> Union["BaseElement", FlattenSentinel["BaseElement"], RemovalSentinel]: return updated_node @mark_no_op @@ -7554,34 +7085,6 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> "BaseAugOp": return updated_node - @mark_no_op - def leave_TemplatedString( - self, original_node: "TemplatedString", updated_node: "TemplatedString" - ) -> "BaseExpression": - return updated_node - - @mark_no_op - def leave_TemplatedStringExpression( - self, - original_node: "TemplatedStringExpression", - updated_node: "TemplatedStringExpression", - ) -> Union[ - "BaseTemplatedStringContent", - FlattenSentinel["BaseTemplatedStringContent"], - RemovalSentinel, - ]: - return updated_node - - @mark_no_op - def leave_TemplatedStringText( - self, original_node: "TemplatedStringText", updated_node: "TemplatedStringText" - ) -> Union[ - "BaseTemplatedStringContent", - FlattenSentinel["BaseTemplatedStringContent"], - RemovalSentinel, - ]: - return updated_node - @mark_no_op def leave_TrailingWhitespace( self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace" @@ -7606,38 +7109,6 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions): ) -> "BaseExpression": return updated_node - @mark_no_op - def leave_TypeAlias( - self, original_node: "TypeAlias", updated_node: "TypeAlias" - ) -> Union[ - "BaseSmallStatement", FlattenSentinel["BaseSmallStatement"], RemovalSentinel - ]: - return updated_node - - @mark_no_op - def leave_TypeParam( - self, original_node: "TypeParam", updated_node: "TypeParam" - ) -> Union["TypeParam", FlattenSentinel["TypeParam"], RemovalSentinel]: - return updated_node - - @mark_no_op - def leave_TypeParameters( - self, original_node: "TypeParameters", updated_node: "TypeParameters" - ) -> "TypeParameters": - return updated_node - - @mark_no_op - def leave_TypeVar( - self, original_node: "TypeVar", updated_node: "TypeVar" - ) -> "TypeVar": - return updated_node - - @mark_no_op - def leave_TypeVarTuple( - self, original_node: "TypeVarTuple", updated_node: "TypeVarTuple" - ) -> "TypeVarTuple": - return updated_node - @mark_no_op def leave_UnaryOperation( self, original_node: "UnaryOperation", updated_node: "UnaryOperation" diff --git a/libcst/_typed_visitor_base.py b/libcst/_typed_visitor_base.py index de751a15..3b1bd2db 100644 --- a/libcst/_typed_visitor_base.py +++ b/libcst/_typed_visitor_base.py @@ -3,8 +3,10 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any, Callable, cast, TypeVar +from typing import Any, Callable, cast, TYPE_CHECKING, TypeVar +if TYPE_CHECKING: + from libcst._typed_visitor import CSTTypedBaseFunctions # noqa: F401 # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters. F = TypeVar("F", bound=Callable) diff --git a/libcst/_types.py b/libcst/_types.py index 24055a5c..8df90ee5 100644 --- a/libcst/_types.py +++ b/libcst/_types.py @@ -4,8 +4,7 @@ # LICENSE file in the root directory of this source tree. 
-from pathlib import PurePath -from typing import TYPE_CHECKING, TypeVar, Union +from typing import TYPE_CHECKING, TypeVar if TYPE_CHECKING: from libcst._nodes.base import CSTNode # noqa: F401 @@ -13,4 +12,3 @@ if TYPE_CHECKING: CSTNodeT = TypeVar("CSTNodeT", bound="CSTNode") CSTNodeT_co = TypeVar("CSTNodeT_co", bound="CSTNode", covariant=True) -StrPath = Union[str, PurePath] diff --git a/libcst/codegen/gather.py b/libcst/codegen/gather.py index 5eeaa7d3..7d7fa8d6 100644 --- a/libcst/codegen/gather.py +++ b/libcst/codegen/gather.py @@ -7,12 +7,12 @@ import inspect from collections import defaultdict from collections.abc import Sequence as ABCSequence from dataclasses import dataclass, fields, replace -from typing import Dict, Iterator, List, Mapping, Sequence, Set, Type, Union +from typing import Dict, Generator, List, Mapping, Sequence, Set, Type, Union import libcst as cst -def _get_bases() -> Iterator[Type[cst.CSTNode]]: +def _get_bases() -> Generator[Type[cst.CSTNode], None, None]: """ Get all base classes that are subclasses of CSTNode but not an actual node itself. This allows us to keep our types sane by refering to the @@ -27,11 +27,11 @@ def _get_bases() -> Iterator[Type[cst.CSTNode]]: typeclasses: Sequence[Type[cst.CSTNode]] = sorted( - _get_bases(), key=lambda base: base.__name__ + list(_get_bases()), key=lambda base: base.__name__ ) -def _get_nodes() -> Iterator[Type[cst.CSTNode]]: +def _get_nodes() -> Generator[Type[cst.CSTNode], None, None]: """ Grab all CSTNodes that are not a superclass. Basically, anything that a person might use to generate a tree. @@ -53,7 +53,7 @@ def _get_nodes() -> Iterator[Type[cst.CSTNode]]: all_libcst_nodes: Sequence[Type[cst.CSTNode]] = sorted( - _get_nodes(), key=lambda node: node.__name__ + list(_get_nodes()), key=lambda node: node.__name__ ) node_to_bases: Dict[Type[cst.CSTNode], List[Type[cst.CSTNode]]] = {} for node in all_libcst_nodes: diff --git a/libcst/codegen/gen_matcher_classes.py b/libcst/codegen/gen_matcher_classes.py index e6def68c..c8453a5d 100644 --- a/libcst/codegen/gen_matcher_classes.py +++ b/libcst/codegen/gen_matcher_classes.py @@ -8,7 +8,7 @@ from dataclasses import dataclass, fields from typing import Generator, List, Optional, Sequence, Set, Tuple, Type, Union import libcst as cst -from libcst import CSTLogicError, ensure_type, parse_expression +from libcst import ensure_type, parse_expression from libcst.codegen.gather import all_libcst_nodes, typeclasses CST_DIR: Set[str] = set(dir(cst)) @@ -16,109 +16,6 @@ CLASS_RE = r"" OPTIONAL_RE = r"typing\.Union\[([^,]*?), NoneType]" -class NormalizeUnions(cst.CSTTransformer): - """ - Convert a binary operation with | operators into a Union type. - For example, converts `foo | bar | baz` into `typing.Union[foo, bar, baz]`. - Special case: converts `foo | None` or `None | foo` into `typing.Optional[foo]`. - Also flattens nested typing.Union types. 
- """ - - def leave_Subscript( - self, original_node: cst.Subscript, updated_node: cst.Subscript - ) -> cst.Subscript: - # Check if this is a typing.Union - if ( - isinstance(updated_node.value, cst.Attribute) - and isinstance(updated_node.value.value, cst.Name) - and updated_node.value.attr.value == "Union" - and updated_node.value.value.value == "typing" - ): - # Collect all operands from any nested Unions - operands: List[cst.BaseExpression] = [] - for slc in updated_node.slice: - if not isinstance(slc.slice, cst.Index): - continue - value = slc.slice.value - # If this is a nested Union, add its elements - if ( - isinstance(value, cst.Subscript) - and isinstance(value.value, cst.Attribute) - and isinstance(value.value.value, cst.Name) - and value.value.attr.value == "Union" - and value.value.value.value == "typing" - ): - operands.extend( - nested_slc.slice.value - for nested_slc in value.slice - if isinstance(nested_slc.slice, cst.Index) - ) - else: - operands.append(value) - - # flatten operands into a Union type - return cst.Subscript( - cst.Attribute(cst.Name("typing"), cst.Name("Union")), - [cst.SubscriptElement(cst.Index(operand)) for operand in operands], - ) - return updated_node - - def leave_BinaryOperation( - self, original_node: cst.BinaryOperation, updated_node: cst.BinaryOperation - ) -> Union[cst.BinaryOperation, cst.Subscript]: - if not updated_node.operator.deep_equals(cst.BitOr()): - return updated_node - - def flatten_binary_op(node: cst.BaseExpression) -> List[cst.BaseExpression]: - """Flatten a binary operation tree into a list of operands.""" - if not isinstance(node, cst.BinaryOperation): - # If it's a Union type, extract its elements - if ( - isinstance(node, cst.Subscript) - and isinstance(node.value, cst.Attribute) - and isinstance(node.value.value, cst.Name) - and node.value.attr.value == "Union" - and node.value.value.value == "typing" - ): - return [ - slc.slice.value - for slc in node.slice - if isinstance(slc.slice, cst.Index) - ] - return [node] - if not node.operator.deep_equals(cst.BitOr()): - return [node] - - left_operands = flatten_binary_op(node.left) - right_operands = flatten_binary_op(node.right) - return left_operands + right_operands - - # Flatten the binary operation tree into a list of operands - operands = flatten_binary_op(updated_node) - - # Check for Optional case (None in union) - none_count = sum( - 1 for op in operands if isinstance(op, cst.Name) and op.value == "None" - ) - if none_count == 1 and len(operands) == 2: - # This is an Optional case - find the non-None operand - non_none = next( - op - for op in operands - if not (isinstance(op, cst.Name) and op.value == "None") - ) - return cst.Subscript( - cst.Attribute(cst.Name("typing"), cst.Name("Optional")), - [cst.SubscriptElement(cst.Index(non_none))], - ) - - # Regular Union case - return cst.Subscript( - cst.Attribute(cst.Name("typing"), cst.Name("Union")), - [cst.SubscriptElement(cst.Index(operand)) for operand in operands], - ) - - class CleanseFullTypeNames(cst.CSTTransformer): def leave_Call( self, original_node: cst.Call, updated_node: cst.Call @@ -283,9 +180,9 @@ class AddWildcardsToSequenceUnions(cst.CSTTransformer): # type blocks, even for sequence types. return if len(node.slice) != 1: - raise ValueError( + raise Exception( "Unexpected number of sequence elements inside Sequence type " - "annotation!" + + "annotation!" 
) nodeslice = node.slice[0].slice if isinstance(nodeslice, cst.Index): @@ -368,9 +265,7 @@ def _get_raw_name(node: cst.CSTNode) -> Optional[str]: if isinstance(node, cst.Name): return node.value elif isinstance(node, cst.SimpleString): - evaluated_value = node.evaluated_value - if isinstance(evaluated_value, str): - return evaluated_value + return node.evaluated_value elif isinstance(node, cst.SubscriptElement): return _get_raw_name(node.slice) elif isinstance(node, cst.Index): @@ -449,14 +344,10 @@ def _get_clean_type_from_subscript( if typecst.value.deep_equals(cst.Name("Sequence")): # Lets attempt to widen the sequence type and alias it. if len(typecst.slice) != 1: - raise CSTLogicError( - "Logic error, Sequence shouldn't have more than one param!" - ) + raise Exception("Logic error, Sequence shouldn't have more than one param!") inner_type = typecst.slice[0].slice if not isinstance(inner_type, cst.Index): - raise CSTLogicError( - "Logic error, expecting Index for only Sequence element!" - ) + raise Exception("Logic error, expecting Index for only Sequence element!") inner_type = inner_type.value if isinstance(inner_type, cst.Subscript): @@ -464,9 +355,7 @@ def _get_clean_type_from_subscript( elif isinstance(inner_type, (cst.Name, cst.SimpleString)): clean_inner_type = _get_clean_type_from_expression(aliases, inner_type) else: - raise CSTLogicError( - f"Logic error, unexpected type in Sequence: {type(inner_type)}!" - ) + raise Exception("Logic error, unexpected type in Sequence!") return _get_wrapped_union_type( typecst.deep_replace(inner_type, clean_inner_type), @@ -495,12 +384,9 @@ def _get_clean_type_and_aliases( typestr = re.sub(OPTIONAL_RE, r"typing.Optional[\1]", typestr) # Now, parse the expression with LibCST. - + cleanser = CleanseFullTypeNames() typecst = parse_expression(typestr) - typecst = typecst.visit(NormalizeUnions()) - assert isinstance(typecst, cst.BaseExpression) - typecst = typecst.visit(CleanseFullTypeNames()) - assert isinstance(typecst, cst.BaseExpression) + typecst = typecst.visit(cleanser) aliases: List[Alias] = [] # Now, convert the type to allow for MetadataMatchType and MatchIfTrue values. @@ -509,7 +395,7 @@ def _get_clean_type_and_aliases( elif isinstance(typecst, (cst.Name, cst.SimpleString)): clean_type = _get_clean_type_from_expression(aliases, typecst) else: - raise CSTLogicError(f"Logic error, unexpected top level type: {type(typecst)}!") + raise Exception("Logic error, unexpected top level type!") # Now, insert OneOf/AllOf and MatchIfTrue into unions so we can typecheck their usage. 
# This allows us to put OneOf[SomeType] or MatchIfTrue[cst.SomeType] into any @@ -555,7 +441,8 @@ generated_code.append("") generated_code.append("") generated_code.append("# This file was generated by libcst.codegen.gen_matcher_classes") generated_code.append("from dataclasses import dataclass") -generated_code.append("from typing import Literal, Optional, Sequence, Union") +generated_code.append("from typing import Optional, Sequence, Union") +generated_code.append("from typing_extensions import Literal") generated_code.append("import libcst as cst") generated_code.append("") generated_code.append( @@ -660,7 +547,7 @@ for node in all_libcst_nodes: # Make sure to add an __all__ for flake8 and compatibility with "from libcst.matchers import *" -generated_code.append(f"__all__ = {repr(sorted(all_exports))}") +generated_code.append(f"__all__ = {repr(sorted(list(all_exports)))}") if __name__ == "__main__": diff --git a/libcst/codegen/gen_type_mapping.py b/libcst/codegen/gen_type_mapping.py index cc31783d..a5af2bd9 100644 --- a/libcst/codegen/gen_type_mapping.py +++ b/libcst/codegen/gen_type_mapping.py @@ -29,7 +29,7 @@ generated_code.append("") generated_code.append("") for module, objects in imports.items(): generated_code.append(f"from {module} import (") - generated_code.append(f" {', '.join(sorted(objects))}") + generated_code.append(f" {', '.join(sorted(list(objects)))}") generated_code.append(")") # Generate the base visit_ methods diff --git a/libcst/codegen/gen_visitor_functions.py b/libcst/codegen/gen_visitor_functions.py index 36d21a5e..ef369cfa 100644 --- a/libcst/codegen/gen_visitor_functions.py +++ b/libcst/codegen/gen_visitor_functions.py @@ -32,7 +32,7 @@ generated_code.append("") generated_code.append("if TYPE_CHECKING:") for module, objects in imports.items(): generated_code.append(f" from {module} import ( # noqa: F401") - generated_code.append(f" {', '.join(sorted(objects))}") + generated_code.append(f" {', '.join(sorted(list(objects)))}") generated_code.append(" )") @@ -87,6 +87,7 @@ for node in sorted(nodebases.keys(), key=lambda node: node.__name__): generated_code.append("") generated_code.append("") generated_code.append("class CSTTypedTransformerFunctions(CSTTypedBaseFunctions):") +generated_code.append(" pass") for node in sorted(nodebases.keys(), key=lambda node: node.__name__): name = node.__name__ if name.startswith("Base"): @@ -110,7 +111,6 @@ for node in sorted(nodebases.keys(), key=lambda node: node.__name__): ) generated_code.append(" return updated_node") - if __name__ == "__main__": # Output the code print("\n".join(generated_code)) diff --git a/libcst/codegen/generate.py b/libcst/codegen/generate.py index 92f13176..74a418f5 100644 --- a/libcst/codegen/generate.py +++ b/libcst/codegen/generate.py @@ -25,11 +25,8 @@ from libcst.codegen.transforms import ( def format_file(fname: str) -> None: - subprocess.check_call( - ["ufmt", "format", fname], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + with open(os.devnull, "w") as devnull: + subprocess.check_call(["ufmt", "format", fname], stdout=devnull, stderr=devnull) def clean_generated_code(code: str) -> str: @@ -68,11 +65,12 @@ def codegen_visitors() -> None: # Now, see if the file we generated causes any import errors # by attempting to run codegen again in a new process. 
- subprocess.check_call( - [sys.executable, "-m", "libcst.codegen.gen_visitor_functions"], - cwd=base, - stdout=subprocess.DEVNULL, - ) + with open(os.devnull, "w") as devnull: + subprocess.check_call( + ["python3", "-m", "libcst.codegen.gen_visitor_functions"], + cwd=base, + stdout=devnull, + ) # If it worked, lets format the file format_file(visitors_file) diff --git a/libcst/codegen/tests/test_codegen_clean.py b/libcst/codegen/tests/test_codegen_clean.py index 5ee5903d..7b71ba36 100644 --- a/libcst/codegen/tests/test_codegen_clean.py +++ b/libcst/codegen/tests/test_codegen_clean.py @@ -3,7 +3,6 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -import difflib import os import os.path @@ -21,20 +20,12 @@ class TestCodegenClean(UnitTest): new_code: str, module_name: str, ) -> None: - if old_code != new_code: - diff = difflib.unified_diff( - old_code.splitlines(keepends=True), - new_code.splitlines(keepends=True), - fromfile="old_code", - tofile="new_code", - ) - diff_str = "".join(diff) - self.fail( - f"{module_name} needs new codegen, see " - + "`python -m libcst.codegen.generate --help` " - + "for instructions, or run `python -m libcst.codegen.generate all`. " - + f"Diff:\n{diff_str}" - ) + self.assertTrue( + old_code == new_code, + f"{module_name} needs new codegen, see " + + "`python -m libcst.codegen.generate --help` " + + "for instructions, or run `python -m libcst.codegen.generate all`", + ) def test_codegen_clean_visitor_functions(self) -> None: """ @@ -132,50 +123,3 @@ class TestCodegenClean(UnitTest): # Now that we've done simple codegen, verify that it matches. self.assert_code_matches(old_code, new_code, "libcst.matchers._return_types") - - def test_normalize_unions(self) -> None: - """ - Verifies that NormalizeUnions correctly converts binary operations with | - into Union types, with special handling for Optional cases. - """ - import libcst as cst - from libcst.codegen.gen_matcher_classes import NormalizeUnions - - def assert_transforms_to(input_code: str, expected_code: str) -> None: - input_cst = cst.parse_expression(input_code) - expected_cst = cst.parse_expression(expected_code) - - result = input_cst.visit(NormalizeUnions()) - assert isinstance( - result, cst.BaseExpression - ), f"Expected BaseExpression, got {type(result)}" - - result_code = cst.Module(body=()).code_for_node(result) - expected_code_str = cst.Module(body=()).code_for_node(expected_cst) - - self.assertEqual( - result_code, - expected_code_str, - f"Expected {expected_code_str}, got {result_code}", - ) - - # Test regular union case - assert_transforms_to("foo | bar | baz", "typing.Union[foo, bar, baz]") - - # Test Optional case (None on right) - assert_transforms_to("foo | None", "typing.Optional[foo]") - - # Test Optional case (None on left) - assert_transforms_to("None | foo", "typing.Optional[foo]") - - # Test case with more than 2 operands including None (should remain Union) - assert_transforms_to("foo | bar | None", "typing.Union[foo, bar, None]") - - # Flatten existing Union types - assert_transforms_to( - "typing.Union[foo, typing.Union[bar, baz]]", "typing.Union[foo, bar, baz]" - ) - # Merge two kinds of union types - assert_transforms_to( - "foo | typing.Union[bar, baz]", "typing.Union[foo, bar, baz]" - ) diff --git a/libcst/codemod/_cli.py b/libcst/codemod/_cli.py index d9c70d05..a7b18783 100644 --- a/libcst/codemod/_cli.py +++ b/libcst/codemod/_cli.py @@ -8,25 +8,20 @@ Provides helpers for CLI interaction. 
""" import difflib -import functools import os.path import re import subprocess import sys import time import traceback -from concurrent.futures import as_completed, Executor -from copy import deepcopy -from dataclasses import dataclass -from multiprocessing import cpu_count -from pathlib import Path -from typing import AnyStr, Callable, cast, Dict, List, Optional, Sequence, Type, Union -from warnings import warn +from dataclasses import dataclass, replace +from multiprocessing import cpu_count, Pool +from pathlib import Path, PurePath +from typing import Any, AnyStr, cast, Dict, List, Optional, Sequence, Union from libcst import parse_module, PartialParserConfig from libcst.codemod._codemod import Codemod -from libcst.codemod._context import CodemodContext -from libcst.codemod._dummy_pool import DummyExecutor +from libcst.codemod._dummy_pool import DummyPool from libcst.codemod._runner import ( SkipFile, SkipReason, @@ -37,7 +32,6 @@ from libcst.codemod._runner import ( TransformSkip, TransformSuccess, ) -from libcst.helpers import calculate_module_and_package from libcst.metadata import FullRepoManager _DEFAULT_GENERATED_CODE_MARKER: str = f"@gen{''}erated" @@ -51,7 +45,7 @@ def invoke_formatter(formatter_args: Sequence[str], code: AnyStr) -> AnyStr: # Make sure there is something to run if len(formatter_args) == 0: - raise ValueError("No formatter configured but code formatting requested.") + raise Exception("No formatter configured but code formatting requested.") # Invoke the formatter, giving it the code as stdin and assuming the formatted # code comes from stdout. @@ -95,10 +89,7 @@ def gather_files( ret.extend( str(p) for p in Path(fd).rglob("*.py*") - if Path.is_file(p) - and ( - str(p).endswith("py") or (include_stubs and str(p).endswith("pyi")) - ) + if str(p).endswith("py") or (include_stubs and str(p).endswith("pyi")) ) return sorted(ret) @@ -193,6 +184,30 @@ def exec_transform_with_prettyprint( return maybe_code +def _calculate_module(repo_root: Optional[str], filename: str) -> Optional[str]: + # Given an absolute repo_root and an absolute filename, calculate the + # python module name for the file. + if repo_root is None: + # We don't have a repo root, so this is impossible to calculate. + return None + + try: + relative_filename = PurePath(filename).relative_to(repo_root) + except ValueError: + # This file seems to be out of the repo root. + return None + + # get rid of extension + relative_filename = relative_filename.with_suffix("") + + # get rid of any special cases + if relative_filename.stem in ["__init__", "__main__"]: + relative_filename = relative_filename.parent + + # Now, convert to dots to represent the python module. 
+ return ".".join(relative_filename.parts) + + @dataclass(frozen=True) class ExecutionResult: # File we have results for @@ -215,52 +230,11 @@ class ExecutionConfig: unified_diff: Optional[int] = None -def _prepare_context( - repo_root: str, +def _execute_transform( # noqa: C901 + transformer: Codemod, filename: str, - scratch: Dict[str, object], - repo_manager: Optional[FullRepoManager], -) -> CodemodContext: - # determine the module and package name for this file - try: - module_name_and_package = calculate_module_and_package(repo_root, filename) - mod_name = module_name_and_package.name - pkg_name = module_name_and_package.package - except ValueError as ex: - print(f"Failed to determine module name for {filename}: {ex}", file=sys.stderr) - mod_name = None - pkg_name = None - return CodemodContext( - scratch=scratch, - filename=filename, - full_module_name=mod_name, - full_package_name=pkg_name, - metadata_manager=repo_manager, - ) - - -def _instantiate_transformer( - transformer: Union[Codemod, Type[Codemod]], - repo_root: str, - filename: str, - original_scratch: Dict[str, object], - codemod_kwargs: Dict[str, object], - repo_manager: Optional[FullRepoManager], -) -> Codemod: - if isinstance(transformer, type): - return transformer( # type: ignore - context=_prepare_context(repo_root, filename, {}, repo_manager), - **codemod_kwargs, - ) - transformer.context = _prepare_context( - repo_root, filename, deepcopy(original_scratch), repo_manager - ) - return transformer - - -def _check_for_skip( - filename: str, config: ExecutionConfig -) -> Union[ExecutionResult, bytes]: + config: ExecutionConfig, +) -> ExecutionResult: for pattern in config.blacklist_patterns: if re.fullmatch(pattern, filename): return ExecutionResult( @@ -272,46 +246,33 @@ def _check_for_skip( ), ) - with open(filename, "rb") as fp: - oldcode = fp.read() - - # Skip generated files - if ( - not config.include_generated - and config.generated_code_marker.encode("utf-8") in oldcode - ): - return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformSkip( - skip_reason=SkipReason.GENERATED, - skip_description="Generated file.", - ), - ) - return oldcode - - -def _execute_transform( - transformer: Union[Codemod, Type[Codemod]], - filename: str, - config: ExecutionConfig, - original_scratch: Dict[str, object], - codemod_args: Optional[Dict[str, object]], - repo_manager: Optional[FullRepoManager], -) -> ExecutionResult: - warnings: list[str] = [] try: - oldcode = _check_for_skip(filename, config) - if isinstance(oldcode, ExecutionResult): - return oldcode + with open(filename, "rb") as fp: + oldcode = fp.read() - transformer_instance = _instantiate_transformer( - transformer, - config.repo_root or ".", - filename, - original_scratch, - codemod_args or {}, - repo_manager, + # Skip generated files + if ( + not config.include_generated + and config.generated_code_marker.encode("utf-8") in oldcode + ): + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformSkip( + skip_reason=SkipReason.GENERATED, + skip_description="Generated file.", + ), + ) + + # Somewhat gross hack to provide the filename in the transform's context. + # We do this after the fork so that a context that was initialized with + # some defaults before calling parallel_exec_transform_with_prettyprint + # will be updated per-file. 
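The `full_module_name` filled in just below comes from the path-to-module mapping of `_calculate_module` above. A minimal standalone sketch of that mapping, kept separate from the patch; the `module_name_for` name and the example paths are made up:

from pathlib import PurePath
from typing import Optional


def module_name_for(repo_root: Optional[str], filename: str) -> Optional[str]:
    # Same steps as _calculate_module above: make the path relative to the
    # repo root, drop the extension, collapse __init__/__main__ into the
    # package, then join the remaining parts with dots.
    if repo_root is None:
        return None
    try:
        relative = PurePath(filename).relative_to(repo_root)
    except ValueError:
        return None  # file lives outside the repo root
    relative = relative.with_suffix("")
    if relative.stem in ("__init__", "__main__"):
        relative = relative.parent
    return ".".join(relative.parts)


# module_name_for("/repo", "/repo/foo/bar/__init__.py") -> "foo.bar"
# module_name_for("/repo", "/repo/foo/baz.py")          -> "foo.baz"
# module_name_for("/repo", "/elsewhere/baz.py")         -> None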
+ transformer.context = replace( + transformer.context, + filename=filename, + full_module_name=_calculate_module(config.repo_root, filename), + scratch={}, ) # Run the transform, bail if we failed or if we aren't formatting code @@ -324,26 +285,55 @@ def _execute_transform( else PartialParserConfig() ), ) - output_tree = transformer_instance.transform_module(input_tree) + output_tree = transformer.transform_module(input_tree) newcode = output_tree.bytes encoding = output_tree.encoding - warnings.extend(transformer_instance.context.warnings) + except KeyboardInterrupt: + return ExecutionResult( + filename=filename, changed=False, transform_result=TransformExit() + ) except SkipFile as ex: - warnings.extend(transformer_instance.context.warnings) return ExecutionResult( filename=filename, changed=False, transform_result=TransformSkip( skip_reason=SkipReason.OTHER, skip_description=str(ex), - warning_messages=warnings, + warning_messages=transformer.context.warnings, + ), + ) + except Exception as ex: + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformFailure( + error=ex, + traceback_str=traceback.format_exc(), + warning_messages=transformer.context.warnings, ), ) # Call formatter if needed, but only if we actually changed something in this # file if config.format_code and newcode != oldcode: - newcode = invoke_formatter(config.formatter_args, newcode) + try: + newcode = invoke_formatter(config.formatter_args, newcode) + except KeyboardInterrupt: + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformExit(), + ) + except Exception as ex: + return ExecutionResult( + filename=filename, + changed=False, + transform_result=TransformFailure( + error=ex, + traceback_str=traceback.format_exc(), + warning_messages=transformer.context.warnings, + ), + ) # Format as unified diff if needed, otherwise save it back changed = oldcode != newcode @@ -366,14 +356,13 @@ def _execute_transform( return ExecutionResult( filename=filename, changed=changed, - transform_result=TransformSuccess(warning_messages=warnings, code=newcode), + transform_result=TransformSuccess( + warning_messages=transformer.context.warnings, code=newcode + ), ) - except KeyboardInterrupt: return ExecutionResult( - filename=filename, - changed=False, - transform_result=TransformExit(warning_messages=warnings), + filename=filename, changed=False, transform_result=TransformExit() ) except Exception as ex: return ExecutionResult( @@ -382,7 +371,7 @@ def _execute_transform( transform_result=TransformFailure( error=ex, traceback_str=traceback.format_exc(), - warning_messages=warnings, + warning_messages=transformer.context.warnings, ), ) @@ -435,7 +424,7 @@ class Progress: operations still to do. """ - if files_finished <= 0 or elapsed_seconds == 0: + if files_finished <= 0: # Technically infinite but calculating sounds better. return "[calculating]" @@ -493,7 +482,7 @@ def _print_parallel_result( ) # In unified diff mode, the code is a diff we must print. - if unified_diff and result.code: + if unified_diff: print(result.code) @@ -519,8 +508,15 @@ class ParallelTransformResult: skips: int +# Unfortunate wrapper required since there is no `istarmap_unordered`... 
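The wrapper defined below exists because `Pool.imap_unordered` accepts only a single-argument callable and lambdas are not picklable, so keyword arguments are packed into plain dicts up front and unpacked again inside a module-level function. A minimal self-contained sketch of that pattern; the `_work`/`_work_wrap` names and the toy job data are made up:

from multiprocessing import Pool
from typing import Any, Dict


def _work(name: str, repeat: int) -> str:
    # Stand-in for _execute_transform: any module-level, picklable function.
    return name * repeat


def _work_wrap(job: Dict[str, Any]) -> str:
    # Stand-in for _execute_transform_wrap: unpack the kwargs dict.
    return _work(**job)


if __name__ == "__main__":
    jobs = [{"name": "ab", "repeat": 2}, {"name": "c", "repeat": 3}]
    with Pool(processes=2) as pool:
        for result in pool.imap_unordered(_work_wrap, jobs, chunksize=1):
            print(result)  # completion order is not guaranteed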
+def _execute_transform_wrap( + job: Dict[str, Any], +) -> ExecutionResult: + return _execute_transform(**job) + + def parallel_exec_transform_with_prettyprint( # noqa: C901 - transform: Union[Codemod, Type[Codemod]], + transform: Codemod, files: Sequence[str], *, jobs: Optional[int] = None, @@ -536,52 +532,41 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 blacklist_patterns: Sequence[str] = (), python_version: Optional[str] = None, repo_root: Optional[str] = None, - codemod_args: Optional[Dict[str, object]] = None, ) -> ParallelTransformResult: """ - Given a list of files and a codemod we should apply to them, fork and apply the - codemod in parallel to all of the files, including any configured formatter. The - ``jobs`` parameter controls the maximum number of in-flight transforms, and needs to - be at least 1. If not included, the number of jobs will automatically be set to the - number of CPU cores. If ``unified_diff`` is set to a number, changes to files will - be printed to stdout with ``unified_diff`` lines of context. If it is set to - ``None`` or left out, files themselves will be updated with changes and formatting. - If a ``python_version`` is provided, then we will parse each source file using this - version. Otherwise, we will use the version of the currently executing python + Given a list of files and an instantiated codemod we should apply to them, + fork and apply the codemod in parallel to all of the files, including any + configured formatter. The ``jobs`` parameter controls the maximum number of + in-flight transforms, and needs to be at least 1. If not included, the number + of jobs will automatically be set to the number of CPU cores. If ``unified_diff`` + is set to a number, changes to files will be printed to stdout with + ``unified_diff`` lines of context. If it is set to ``None`` or left out, files + themselves will be updated with changes and formatting. If a + ``python_version`` is provided, then we will parse each source file using + this version. Otherwise, we will use the version of the currently executing python binary. - A progress indicator as well as any generated warnings will be printed to stderr. To - supress the interactive progress indicator, set ``hide_progress`` to ``True``. Files - that include the generated code marker will be skipped unless the - ``include_generated`` parameter is set to ``True``. Similarly, files that match a - supplied blacklist of regex patterns will be skipped. Warnings for skipping both - blacklisted and generated files will be printed to stderr along with warnings - generated by the codemod unless ``hide_blacklisted`` and ``hide_generated`` are set - to ``True``. Files that were successfully codemodded will not be printed to stderr - unless ``show_successes`` is set to ``True``. + A progress indicator as well as any generated warnings will be printed to stderr. + To supress the interactive progress indicator, set ``hide_progress`` to ``True``. + Files that include the generated code marker will be skipped unless the + ``include_generated`` parameter is set to ``True``. Similarly, files that match + a supplied blacklist of regex patterns will be skipped. Warnings for skipping + both blacklisted and generated files will be printed to stderr along with + warnings generated by the codemod unless ``hide_blacklisted`` and + ``hide_generated`` are set to ``True``. Files that were successfully codemodded + will not be printed to stderr unless ``show_successes`` is set to ``True``. 
- We take a :class:`~libcst.codemod._codemod.Codemod` class, or an instantiated - :class:`~libcst.codemod._codemod.Codemod`. In the former case, the codemod will be - instantiated for each file, with ``codemod_args`` passed in to the constructor. - Passing an already instantiated :class:`~libcst.codemod._codemod.Codemod` is - deprecated, because it leads to sharing of the - :class:`~libcst.codemod._codemod.Codemod` instance across files, which is a common - source of hard-to-track-down bugs when the :class:`~libcst.codemod._codemod.Codemod` - tracks its state on the instance. + To make this API possible, we take an instantiated transform. This is due to + the fact that lambdas are not pickleable and pickling functions is undefined. + This means we're implicitly relying on fork behavior on UNIX-like systems, and + this function will not work on Windows systems. To create a command-line utility + that runs on Windows, please instead see + :func:`~libcst.codemod.exec_transform_with_prettyprint`. """ - if isinstance(transform, Codemod): - warn( - "Passing transformer instances to `parallel_exec_transform_with_prettyprint` " - "is deprecated and will break in a future version. " - "Please pass the transformer class instead.", - DeprecationWarning, - stacklevel=2, - ) - # Ensure that we have no duplicates, otherwise we might get race conditions # on write. - files = sorted({os.path.abspath(f) for f in files}) + files = sorted(list({os.path.abspath(f) for f in files})) total = len(files) progress = Progress(enabled=not hide_progress, total=total) @@ -593,12 +578,11 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 ) if jobs < 1: - raise ValueError("Must have at least one job to process!") + raise Exception("Must have at least one job to process!") if total == 0: return ParallelTransformResult(successes=0, failures=0, skips=0, warnings=0) - metadata_manager: Optional[FullRepoManager] = None if repo_root is not None: # Make sure if there is a root that we have the absolute path to it. repo_root = os.path.abspath(repo_root) @@ -611,7 +595,10 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 transform.get_inherited_dependencies(), ) metadata_manager.resolve_cache() - + transform.context = replace( + transform.context, + metadata_manager=metadata_manager, + ) print("Executing codemod...", file=sys.stderr) config = ExecutionConfig( @@ -625,16 +612,13 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 python_version=python_version, ) - pool_impl: Callable[[], Executor] if total == 1 or jobs == 1: # Simple case, we should not pay for process overhead. - # Let's just use a dummy synchronous executor. + # Let's just use a dummy synchronous pool. jobs = 1 - pool_impl = DummyExecutor - elif getattr(sys, "_is_gil_enabled", lambda: True)(): # pyre-ignore[16] - from concurrent.futures import ProcessPoolExecutor - - pool_impl = functools.partial(ProcessPoolExecutor, max_workers=jobs) + pool_impl = DummyPool + else: + pool_impl = Pool # Warm the parser, pre-fork. 
parse_module( "", @@ -644,35 +628,25 @@ def parallel_exec_transform_with_prettyprint( # noqa: C901 else PartialParserConfig() ), ) - else: - from concurrent.futures import ThreadPoolExecutor - - pool_impl = functools.partial(ThreadPoolExecutor, max_workers=jobs) successes: int = 0 failures: int = 0 warnings: int = 0 skips: int = 0 - original_scratch = ( - deepcopy(transform.context.scratch) if isinstance(transform, Codemod) else {} - ) - with pool_impl() as executor: # type: ignore + with pool_impl(processes=jobs) as p: # type: ignore + args = [ + { + "transformer": transform, + "filename": filename, + "config": config, + } + for filename in files + ] try: - futures = [ - executor.submit( - _execute_transform, - transformer=transform, - filename=filename, - config=config, - original_scratch=original_scratch, - codemod_args=codemod_args, - repo_manager=metadata_manager, - ) - for filename in files - ] - for future in as_completed(futures): - result = future.result() + for result in p.imap_unordered( + _execute_transform_wrap, args, chunksize=chunksize + ): # Print an execution result, keep track of failures _print_parallel_result( result, diff --git a/libcst/codemod/_codemod.py b/libcst/codemod/_codemod.py index e267f154..c0c3b2c7 100644 --- a/libcst/codemod/_codemod.py +++ b/libcst/codemod/_codemod.py @@ -56,9 +56,9 @@ class Codemod(MetadataDependent, ABC): """ module = self.context.module if module is None: - raise ValueError( + raise Exception( f"Attempted access of {self.__class__.__name__}.module outside of " - "transform_module()." + + "transform_module()." ) return module diff --git a/libcst/codemod/_command.py b/libcst/codemod/_command.py index b7784d30..55a57247 100644 --- a/libcst/codemod/_command.py +++ b/libcst/codemod/_command.py @@ -3,14 +3,12 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -from __future__ import annotations - import argparse import inspect from abc import ABC, abstractmethod -from typing import Dict, Generator, List, Tuple, Type, TypeVar +from typing import Dict, Generator, List, Type, TypeVar -from libcst import CSTNode, Module +from libcst import Module from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer @@ -67,28 +65,6 @@ class CodemodCommand(Codemod, ABC): """ ... - # Lightweight wrappers for RemoveImportsVisitor static functions - def remove_unused_import( - self, - module: str, - obj: str | None = None, - asname: str | None = None, - ) -> None: - RemoveImportsVisitor.remove_unused_import(self.context, module, obj, asname) - - def remove_unused_import_by_node(self, node: CSTNode) -> None: - RemoveImportsVisitor.remove_unused_import_by_node(self.context, node) - - # Lightweight wrappers for AddImportsVisitor static functions - def add_needed_import( - self, - module: str, - obj: str | None = None, - asname: str | None = None, - relative: int = 0, - ) -> None: - AddImportsVisitor.add_needed_import(self.context, module, obj, asname, relative) - def transform_module(self, tree: Module) -> Module: # Overrides (but then calls) Codemod's transform_module to provide # a spot where additional supported transforms can be attached and run. @@ -99,13 +75,13 @@ class CodemodCommand(Codemod, ABC): # have a static method that other transforms can use which takes # a context and other optional args and modifies its own context key # accordingly. 
We import them here so that we don't have circular imports. - supported_transforms: List[Tuple[str, Type[Codemod]]] = [ - (AddImportsVisitor.CONTEXT_KEY, AddImportsVisitor), - (RemoveImportsVisitor.CONTEXT_KEY, RemoveImportsVisitor), - ] + supported_transforms: Dict[str, Type[Codemod]] = { + AddImportsVisitor.CONTEXT_KEY: AddImportsVisitor, + RemoveImportsVisitor.CONTEXT_KEY: RemoveImportsVisitor, + } # For any visitors that we support auto-running, run them here if needed. - for key, transform in supported_transforms: + for key, transform in supported_transforms.items(): if key in self.context.scratch: # We have work to do, so lets run this. tree = self._instantiate_and_run(transform, tree) diff --git a/libcst/codemod/_context.py b/libcst/codemod/_context.py index 47373df4..04154859 100644 --- a/libcst/codemod/_context.py +++ b/libcst/codemod/_context.py @@ -44,12 +44,6 @@ class CodemodContext: #: in the repo named ``foo/bar/baz.py``. full_module_name: Optional[str] = None - #: The current package if a codemod is being executed against a file that - #: lives on disk, and the repository root is correctly configured. This - #: Will take the form of a dotted name such as ``foo.bar`` for a file - #: in the repo named ``foo/bar/baz.py`` - full_package_name: Optional[str] = None - #: The current top level metadata wrapper for the module being modified. #: To access computed metadata when inside an actively running codemod, use #: the :meth:`~libcst.MetadataDependent.get_metadata` method on diff --git a/libcst/codemod/_dummy_pool.py b/libcst/codemod/_dummy_pool.py index aa23a7d4..c4a24932 100644 --- a/libcst/codemod/_dummy_pool.py +++ b/libcst/codemod/_dummy_pool.py @@ -3,47 +3,37 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -import sys -from concurrent.futures import Executor, Future from types import TracebackType -from typing import Callable, Optional, Type, TypeVar +from typing import Callable, Generator, Iterable, Optional, Type, TypeVar -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - -Return = TypeVar("Return") -Params = ParamSpec("Params") +RetT = TypeVar("RetT") +ArgT = TypeVar("ArgT") -class DummyExecutor(Executor): +class DummyPool: """ - Synchronous dummy `concurrent.futures.Executor` analogue. + Synchronous dummy `multiprocessing.Pool` analogue. 
""" - def submit( + def __init__(self, processes: Optional[int] = None) -> None: + pass + + def imap_unordered( self, - fn: Callable[Params, Return], - /, - *args: Params.args, - **kwargs: Params.kwargs, - ) -> Future[Return]: - future: Future[Return] = Future() - try: - result = fn(*args, **kwargs) - future.set_result(result) - except Exception as exc: - future.set_exception(exc) - return future + func: Callable[[ArgT], RetT], + iterable: Iterable[ArgT], + chunksize: Optional[int] = None, + ) -> Generator[RetT, None, None]: + for args in iterable: + yield func(args) - def __enter__(self) -> "DummyExecutor": + def __enter__(self) -> "DummyPool": return self def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: Optional[Type[Exception]], + exc: Optional[Exception], + tb: Optional[TracebackType], ) -> None: pass diff --git a/libcst/codemod/_visitor.py b/libcst/codemod/_visitor.py index 89248838..ab915c49 100644 --- a/libcst/codemod/_visitor.py +++ b/libcst/codemod/_visitor.py @@ -6,7 +6,7 @@ from typing import Mapping import libcst as cst -from libcst import MetadataDependent, MetadataException +from libcst import MetadataDependent from libcst.codemod._codemod import Codemod from libcst.codemod._context import CodemodContext from libcst.matchers import MatcherDecoratableTransformer, MatcherDecoratableVisitor @@ -69,14 +69,14 @@ class ContextAwareVisitor(MatcherDecoratableVisitor, MetadataDependent): if dependencies: wrapper = self.context.wrapper if wrapper is None: - raise MetadataException( + raise Exception( f"Attempting to instantiate {self.__class__.__name__} outside of " + "an active transform. This means that metadata hasn't been " + "calculated and we cannot successfully create this visitor." ) for dep in dependencies: if dep not in wrapper._metadata: - raise MetadataException( + raise Exception( f"Attempting to access metadata {dep.__name__} that was not a " + "declared dependency of parent transform! This means it is " + "not possible to compute this value. Please ensure that all " @@ -101,7 +101,7 @@ class ContextAwareVisitor(MatcherDecoratableVisitor, MetadataDependent): """ module = self.context.module if module is None: - raise ValueError( + raise Exception( f"Attempted access of {self.__class__.__name__}.module outside of " + "transform_module()." ) diff --git a/libcst/codemod/commands/add_trailing_commas.py b/libcst/codemod/commands/add_trailing_commas.py deleted file mode 100644 index 2f33a4bd..00000000 --- a/libcst/codemod/commands/add_trailing_commas.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import argparse -import textwrap -from typing import Dict, Optional - -import libcst as cst -from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand - - -presets_per_formatter: Dict[str, Dict[str, int]] = { - "black": { - "parameter_count": 1, - "argument_count": 2, - }, - "yapf": { - "parameter_count": 2, - "argument_count": 2, - }, -} - - -class AddTrailingCommas(VisitorBasedCodemodCommand): - DESCRIPTION: str = textwrap.dedent( - """ - Codemod that adds trailing commas to arguments in function - headers and function calls. 
- - The idea is that both the black and yapf autoformatters will - tend to split headers and function calls so that there - is one parameter / argument per line if there is a trailing - comma: - - Black will always separate them by line - - Yapf appears to do so whenever there are at least two arguments - - Applying this codemod (and then an autoformatter) may make - it easier to read function definitions and calls - """ - ) - - def __init__( - self, - context: CodemodContext, - formatter: str = "black", - parameter_count: Optional[int] = None, - argument_count: Optional[int] = None, - ) -> None: - super().__init__(context) - presets = presets_per_formatter.get(formatter) - if presets is None: - raise ValueError( - f"Unknown formatter {formatter!r}. Presets exist for " - + ", ".join(presets_per_formatter.keys()) - ) - self.parameter_count: int = parameter_count or presets["parameter_count"] - self.argument_count: int = argument_count or presets["argument_count"] - - @staticmethod - def add_args(arg_parser: argparse.ArgumentParser) -> None: - arg_parser.add_argument( - "--formatter", - dest="formatter", - metavar="FORMATTER", - help="Formatter to target (e.g. yapf or black)", - type=str, - default="black", - ) - arg_parser.add_argument( - "--paramter-count", - dest="parameter_count", - metavar="PARAMETER_COUNT", - help="Minimal number of parameters for us to add trailing comma", - type=int, - default=None, - ) - arg_parser.add_argument( - "--argument-count", - dest="argument_count", - metavar="ARGUMENT_COUNT", - help="Minimal number of arguments for us to add trailing comma", - type=int, - default=None, - ) - - def leave_Parameters( - self, - original_node: cst.Parameters, - updated_node: cst.Parameters, - ) -> cst.Parameters: - skip = ( - # - self.parameter_count is None - or len(updated_node.params) < self.parameter_count - or ( - len(updated_node.params) == 1 - and updated_node.params[0].name.value in {"self", "cls"} - ) - ) - if skip: - return updated_node - else: - last_param = updated_node.params[-1] - return updated_node.with_changes( - params=( - *updated_node.params[:-1], - last_param.with_changes(comma=cst.Comma()), - ), - ) - - def leave_Call( - self, - original_node: cst.Call, - updated_node: cst.Call, - ) -> cst.Call: - if len(updated_node.args) < self.argument_count: - return updated_node - else: - last_arg = updated_node.args[-1] - return updated_node.with_changes( - args=( - *updated_node.args[:-1], - last_arg.with_changes(comma=cst.Comma()), - ), - ) diff --git a/libcst/codemod/commands/convert_format_to_fstring.py b/libcst/codemod/commands/convert_format_to_fstring.py index 43d19bce..a74b5342 100644 --- a/libcst/codemod/commands/convert_format_to_fstring.py +++ b/libcst/codemod/commands/convert_format_to_fstring.py @@ -9,8 +9,6 @@ from typing import Generator, List, Optional, Sequence, Set, Tuple import libcst as cst import libcst.matchers as m -from libcst import CSTLogicError -from libcst._exceptions import ParserSyntaxError from libcst.codemod import ( CodemodContext, ContextAwareTransformer, @@ -25,7 +23,7 @@ def _get_lhs(field: cst.BaseExpression) -> cst.BaseExpression: elif isinstance(field, (cst.Attribute, cst.Subscript)): return _get_lhs(field.value) else: - raise TypeError("Unsupported node type!") + raise Exception("Unsupported node type!") def _find_expr_from_field_name( @@ -50,7 +48,7 @@ def _find_expr_from_field_name( if isinstance(lhs, cst.Integer): index = int(lhs.value) if index < 0 or index >= len(args): - raise CSTLogicError(f"Logic error, arg sequence 
{index} out of bounds!") + raise Exception(f"Logic error, arg sequence {index} out of bounds!") elif isinstance(lhs, cst.Name): for i, arg in enumerate(args): kw = arg.keyword @@ -60,12 +58,10 @@ def _find_expr_from_field_name( index = i break if index is None: - raise CSTLogicError(f"Logic error, arg name {lhs.value} out of bounds!") + raise Exception(f"Logic error, arg name {lhs.value} out of bounds!") if index is None: - raise CSTLogicError( - f"Logic error, unsupported fieldname expression {fieldname}!" - ) + raise Exception(f"Logic error, unsupported fieldname expression {fieldname}!") # Format it! return field_expr.deep_replace(lhs, args[index].value) @@ -145,7 +141,7 @@ def _get_tokens( # noqa: C901 in_brackets -= 1 if in_brackets < 0: - raise ValueError("Stray } in format string!") + raise Exception("Stray } in format string!") if in_brackets == 0: field_name, format_spec, conversion = _get_field(format_accum) @@ -162,11 +158,9 @@ def _get_tokens( # noqa: C901 format_accum += char if in_brackets > 0: - raise ParserSyntaxError( - "Stray { in format string!", lines=[string], raw_line=0, raw_column=0 - ) + raise Exception("Stray { in format string!") if format_accum: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") # Yield the last bit of information yield (prefix, None, None, None) @@ -194,7 +188,7 @@ class SwitchStringQuotesTransformer(ContextAwareTransformer): def __init__(self, context: CodemodContext, avoid_quote: str) -> None: super().__init__(context) if avoid_quote not in {'"', "'"}: - raise ValueError("Must specify either ' or \" single quote to avoid.") + raise Exception("Must specify either ' or \" single quote to avoid.") self.avoid_quote: str = avoid_quote self.replace_quote: str = '"' if avoid_quote == "'" else "'" @@ -225,6 +219,7 @@ class SwitchStringQuotesTransformer(ContextAwareTransformer): class ConvertFormatStringCommand(VisitorBasedCodemodCommand): + DESCRIPTION: str = "Converts instances of str.format() to f-string." @staticmethod @@ -276,7 +271,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): inserted_sequence: int = 0 stringnode = cst.ensure_type(extraction["string"], cst.SimpleString) tokens = _get_tokens(stringnode.raw_value) - for literal_text, field_name, format_spec, conversion in tokens: + for (literal_text, field_name, format_spec, conversion) in tokens: if literal_text: fstring.append(cst.FormattedStringText(literal_text)) if field_name is None: @@ -302,7 +297,7 @@ class ConvertFormatStringCommand(VisitorBasedCodemodCommand): ) in format_spec_tokens: if spec_format_spec is not None: # This shouldn't be possible, we don't allow it in the spec! - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") if spec_literal_text: format_spec_parts.append( cst.FormattedStringText(spec_literal_text) diff --git a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py index f1de5b0c..91e78048 100644 --- a/libcst/codemod/commands/convert_namedtuple_to_dataclass.py +++ b/libcst/codemod/commands/convert_namedtuple_to_dataclass.py @@ -25,9 +25,7 @@ class ConvertNamedTupleToDataclassCommand(VisitorBasedCodemodCommand): NamedTuple-specific attributes and methods. """ - DESCRIPTION: str = ( - "Convert NamedTuple class declarations to Python 3.7 dataclasses using the @dataclass decorator." - ) + DESCRIPTION: str = "Convert NamedTuple class declarations to Python 3.7 dataclasses using the @dataclass decorator." 
METADATA_DEPENDENCIES: Sequence[ProviderT] = (QualifiedNameProvider,) # The 'NamedTuple' we are interested in diff --git a/libcst/codemod/commands/convert_percent_format_to_fstring.py b/libcst/codemod/commands/convert_percent_format_to_fstring.py index d74624e4..9908a5b6 100644 --- a/libcst/codemod/commands/convert_percent_format_to_fstring.py +++ b/libcst/codemod/commands/convert_percent_format_to_fstring.py @@ -53,12 +53,12 @@ class EscapeStringQuote(cst.CSTTransformer): original_node.prefix + quo + original_node.raw_value + quo ) if escaped_string.evaluated_value != original_node.evaluated_value: - raise ValueError( + raise Exception( f"Failed to escape string:\n original:{original_node.value}\n escaped:{escaped_string.value}" ) else: return escaped_string - raise ValueError( + raise Exception( f"Cannot find a good quote for escaping the SimpleString: {original_node.value}" ) return original_node @@ -97,11 +97,9 @@ class ConvertPercentFormatStringCommand(VisitorBasedCodemodCommand): parts.append(cst.FormattedStringText(value=token)) expressions: List[cst.CSTNode] = list( *itertools.chain( - ( - [elm.value for elm in expr.elements] - if isinstance(expr, cst.Tuple) - else [expr] - ) + [elm.value for elm in expr.elements] + if isinstance(expr, cst.Tuple) + else [expr] for expr in exprs ) ) diff --git a/libcst/codemod/commands/convert_type_comments.py b/libcst/codemod/commands/convert_type_comments.py index 5863d94b..337904d2 100644 --- a/libcst/codemod/commands/convert_type_comments.py +++ b/libcst/codemod/commands/convert_type_comments.py @@ -3,16 +3,13 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -import argparse import ast import builtins -import dataclasses import functools import sys -from typing import cast, Dict, List, Optional, Sequence, Set, Tuple, Union +from typing import Optional, Set, Union import libcst as cst -import libcst.matchers as m from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand @@ -25,48 +22,15 @@ def _code_for_node(node: cst.CSTNode) -> str: return _empty_module().code_for_node(node) -def _ast_for_statement(node: cst.CSTNode) -> ast.stmt: - """ - Get the type-comment-enriched python AST for a node. - - If there are illegal type comments, this can return a SyntaxError. - In that case, return the same node with no type comments (which will - cause this codemod to ignore it). - """ +def _ast_for_node(node: cst.CSTNode) -> ast.Module: code = _code_for_node(node) - try: - return ast.parse(code, type_comments=True).body[-1] - except SyntaxError: - return ast.parse(code, type_comments=False).body[-1] + return ast.parse(code, type_comments=True) -def _parse_type_comment( - type_comment: Optional[str], -) -> Optional[ast.expr]: - """ - Attempt to parse a type comment. If it is None or if it fails to parse, - return None. 
- """ - if type_comment is None: - return None - try: - return ast.parse(type_comment, "", "eval").body - except SyntaxError: - return None - - -def _annotation_for_statement( - node: cst.CSTNode, -) -> Optional[ast.expr]: - return _parse_type_comment(_ast_for_statement(node).type_comment) - - -def _parse_func_type_comment( - func_type_comment: Optional[str], -) -> Optional["ast.FunctionType"]: - if func_type_comment is None: - return None - return ast.parse(func_type_comment, "", "func_type") +def _simple_statement_type_comment( + node: cst.SimpleStatementLine, +) -> Optional[str]: + return _ast_for_node(node).body[-1].type_comment @functools.lru_cache() @@ -78,442 +42,50 @@ def _is_builtin(annotation: str) -> bool: return annotation in _builtins() -def _convert_annotation( - raw: str, - quote_annotations: bool, -) -> cst.Annotation: - """ - Convert a raw annotation - which is a string coming from a type - comment - into a suitable libcst Annotation node. - - If `quote_annotations`, we'll always quote annotations unless they are builtin - types. The reason for this is to make the codemod safer to apply - on legacy code where type comments may well include invalid types - that would crash at runtime. - """ +def _convert_annotation(raw: str) -> cst.Annotation: + # Convert annotation comments to string annotations to be safe, + # otherwise runtime errors would be common. + # + # Special-case builtins to reduce the amount of quoting noise. + # + # NOTE: we could potentially detect more cases for skipping quotes + # using ScopeProvider, which would make the output prettier. if _is_builtin(raw): return cst.Annotation(annotation=cst.Name(value=raw)) - if not quote_annotations: - try: - return cst.Annotation(annotation=cst.parse_expression(raw)) - except cst.ParserSyntaxError: - pass - return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) - - -def _is_type_comment(comment: Optional[cst.Comment]) -> bool: - """ - Determine whether a comment is a type comment. - - Unfortunately, to strip type comments in a location-invariant way requires - finding them from pure libcst data. We only use this in function defs, where - the precise cst location of the type comment cna be hard to predict. - """ - if comment is None: - return False - value = comment.value[1:].strip() - if not value.startswith("type:"): - return False - suffix = value.removeprefix("type:").strip().split() - if len(suffix) > 0 and suffix[0] == "ignore": - return False - return True - - -def _strip_type_comment(comment: Optional[cst.Comment]) -> Optional[cst.Comment]: - """ - Remove the type comment while keeping any following comments. - """ - if not _is_type_comment(comment): - return comment - assert comment is not None - idx = comment.value.find("#", 1) - if idx < 0: - return None - return comment.with_changes(value=comment.value[idx:]) - - -class _FailedToApplyAnnotation: - pass - - -class _ArityError(Exception): - pass - - -UnpackedBindings = Union[cst.BaseExpression, List["UnpackedBindings"]] -UnpackedAnnotations = Union[str, List["UnpackedAnnotations"]] -TargetAnnotationPair = Tuple[cst.BaseExpression, str] - - -class AnnotationSpreader: - """ - Utilities to help with lining up tuples of types from type comments with - the tuples of values with which they should be associated. 
- """ - - @staticmethod - def unpack_annotation( - expression: ast.expr, - ) -> UnpackedAnnotations: - if isinstance(expression, ast.Tuple): - return [ - AnnotationSpreader.unpack_annotation(elt) for elt in expression.elts - ] - else: - return ast.unparse(expression) - - @staticmethod - def unpack_target( - target: cst.BaseExpression, - ) -> UnpackedBindings: - """ - Take a (non-function-type) type comment and split it into - components. A type comment body should always be either a single - type or a tuple of types. - - We work with strings for annotations because without detailed scope - analysis that is the safest option for codemods. - """ - if isinstance(target, cst.Tuple): - return [ - AnnotationSpreader.unpack_target(element.value) - for element in target.elements - ] - else: - return target - - @staticmethod - def annotated_bindings( - bindings: UnpackedBindings, - annotations: UnpackedAnnotations, - ) -> List[Tuple[cst.BaseAssignTargetExpression, str]]: - if isinstance(annotations, list): - if isinstance(bindings, list) and len(bindings) == len(annotations): - # The arities match, so we return the flattened result of - # mapping annotated_bindings over each pair. - out: List[Tuple[cst.BaseAssignTargetExpression, str]] = [] - for binding, annotation in zip(bindings, annotations): - out.extend( - AnnotationSpreader.annotated_bindings(binding, annotation) - ) - return out - else: - # Either mismatched lengths, or multi-type and one-target - raise _ArityError() - elif isinstance(bindings, list): - # multi-target and one-type - raise _ArityError() - else: - assert isinstance(bindings, cst.BaseAssignTargetExpression) - return [(bindings, annotations)] - - @staticmethod - def type_declaration( - binding: cst.BaseAssignTargetExpression, - raw_annotation: str, - quote_annotations: bool, - ) -> cst.AnnAssign: - return cst.AnnAssign( - target=binding, - annotation=_convert_annotation( - raw=raw_annotation, - quote_annotations=quote_annotations, - ), - value=None, - ) - - @staticmethod - def type_declaration_statements( - bindings: UnpackedBindings, - annotations: UnpackedAnnotations, - leading_lines: Sequence[cst.EmptyLine], - quote_annotations: bool, - ) -> List[cst.SimpleStatementLine]: - return [ - cst.SimpleStatementLine( - body=[ - AnnotationSpreader.type_declaration( - binding=binding, - raw_annotation=raw_annotation, - quote_annotations=quote_annotations, - ) - ], - leading_lines=leading_lines if i == 0 else [], - ) - for i, (binding, raw_annotation) in enumerate( - AnnotationSpreader.annotated_bindings( - bindings=bindings, - annotations=annotations, - ) - ) - ] - - -def convert_Assign( - node: cst.Assign, - annotation: ast.expr, - quote_annotations: bool, -) -> Union[ - _FailedToApplyAnnotation, - cst.AnnAssign, - List[Union[cst.AnnAssign, cst.Assign]], -]: - # zip the type and target information tother. If there are mismatched - # arities, this is a PEP 484 violation (technically we could use - # logic beyond the PEP to recover some cases as typing.Tuple, but this - # should be rare) so we give up. 
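The comment above is easiest to follow on a concrete case. A sketch of the spreading behavior for a tuple-typed assignment, mirroring the `test_multiple_elements_in_assign_lhs` case that appears later in this diff:

# Input (a single Assign whose target is a tuple, with a tuple type comment):
#     x, y = [], []  # type: List[int], List[str]
# Output: one bare type declaration per binding, followed by the original,
# now untyped, Assign.
x: "List[int]"
y: "List[str]"
x, y = [], []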
- try: - annotations = AnnotationSpreader.unpack_annotation(annotation) - annotated_targets = [ - AnnotationSpreader.annotated_bindings( - bindings=AnnotationSpreader.unpack_target(target.target), - annotations=annotations, - ) - for target in node.targets - ] - except _ArityError: - return _FailedToApplyAnnotation() - if len(annotated_targets) == 1 and len(annotated_targets[0]) == 1: - # We can convert simple one-target assignments into a single AnnAssign - binding, raw_annotation = annotated_targets[0][0] - return cst.AnnAssign( - target=binding, - annotation=_convert_annotation( - raw=raw_annotation, - quote_annotations=quote_annotations, - ), - value=node.value, - semicolon=node.semicolon, - ) else: - # For multi-target assigns (regardless of whether they are using tuples - # on the LHS or multiple `=` tokens or both), we need to add a type - # declaration per individual LHS target. - type_declarations = [ - AnnotationSpreader.type_declaration( - binding, - raw_annotation, - quote_annotations=quote_annotations, - ) - for annotated_bindings in annotated_targets - for binding, raw_annotation in annotated_bindings - ] - return [ - *type_declarations, - node, - ] - - -@dataclasses.dataclass(frozen=True) -class FunctionTypeInfo: - arguments: Dict[str, Optional[str]] - returns: Optional[str] - - def is_empty(self) -> bool: - return self.returns is None and self.arguments == {} - - @classmethod - def from_cst( - cls, - node_cst: cst.FunctionDef, - is_method: bool, - ) -> "FunctionTypeInfo": - """ - Using the `ast` type comment extraction logic, get type information - for a function definition. - - To understand edge case behavior see the `leave_FunctionDef` docstring. - """ - node_ast = cast(ast.FunctionDef, _ast_for_statement(node_cst)) - # Note: this is guaranteed to have the correct arity. - args = [ - *node_ast.args.posonlyargs, - *node_ast.args.args, - *( - [] - if node_ast.args.vararg is None - else [ - node_ast.args.vararg, - ] - ), - *node_ast.args.kwonlyargs, - *( - [] - if node_ast.args.kwarg is None - else [ - node_ast.args.kwarg, - ] - ), - ] - try: - func_type_annotation = _parse_func_type_comment(node_ast.type_comment) - except SyntaxError: - # On unparsable function type annotations, ignore type information - return cls({}, None) - if func_type_annotation is None: - return cls( - arguments={ - arg.arg: arg.type_comment - for arg in args - if arg.type_comment is not None - }, - returns=None, - ) - else: - argtypes = func_type_annotation.argtypes - returns = ast.unparse(func_type_annotation.returns) - if ( - len(argtypes) == 1 - and isinstance(argtypes[0], ast.Constant) - # pyre-ignore [16] Pyre cannot refine constant indexes (yet!) - and argtypes[0].value is Ellipsis - ): - # Only use the return type if the comment was like `(...) -> R` - return cls( - arguments={arg.arg: arg.type_comment for arg in args}, - returns=returns, - ) - elif len(argtypes) == len(args): - # Merge the type comments, preferring inline comments where available - return cls( - arguments={ - arg.arg: arg.type_comment or ast.unparse(from_func_type) - for arg, from_func_type in zip(args, argtypes) - }, - returns=returns, - ) - elif is_method and len(argtypes) == len(args) - 1: - # Merge as above, but skip merging the initial `self` or `cls` arg. 
- return cls( - arguments={ - args[0].arg: args[0].type_comment, - **{ - arg.arg: arg.type_comment or ast.unparse(from_func_type) - for arg, from_func_type in zip(args[1:], argtypes) - }, - }, - returns=returns, - ) - else: - # On arity mismatches, ignore the type information - return cls({}, None) + return cst.Annotation(annotation=cst.SimpleString(f'"{raw}"')) class ConvertTypeComments(VisitorBasedCodemodCommand): - DESCRIPTION = """ - Codemod that converts type comments into Python 3.6+ style - annotations. + """ + Codemod that converts type comments, as described in + https://www.python.org/dev/peps/pep-0484/#type-comments, + into PEP 526 annotated assignments. - Notes: - - This transform requires using the `ast` module, which is not compatible - with multiprocessing. So you should run using a recent version of python, - and set `--jobs=1` if using `python -m libcst.tool codemod ...` from the - commandline. - - This transform requires capabilities from `ast` that are not available - prior to Python 3.9, so libcst must run on Python 3.9+. The code you are - transforming can by Python 3.6+, this limitation applies only to libcst - itself. - - We can handle type comments in the following statement types: - - Assign - - This is converted into a single AnnAssign when possible - - In more complicated cases it will produce multiple AnnAssign - nodes with no value (i.e. "type declaration" statements) - followed by an Assign - - For and With - - We prepend both of these with type declaration statements. - - FunctionDef - - We apply all the types we can find. If we find several: - - We prefer any existing annotations to type comments - - For parameters, we prefer inline type comments to - function-level type comments if we find both. - - We always apply the type comments as quote_annotations annotations, unless - we know that it refers to a builtin. We do not guarantee that - the resulting string annotations would parse, but they should - never cause failures at module import time. - - We attempt to: - - Always strip type comments for statements where we successfully - applied types. - - Never strip type comments for statements where we failed to - apply types. - - There are many edge case possible where the arity of a type - hint (which is either a tuple or a func_type) might not match - the code. In these cases we generally give up: - - For Assign, For, and With, we require that every target of - bindings (e.g. a tuple of names being bound) must have exactly - the same arity as the comment. - - So, for example, we would skip an assignment statement such as - ``x = y, z = 1, 2 # type: int, int`` because the arity - of ``x`` does not match the arity of the hint. - - For FunctionDef, we do *not* check arity of inline parameter - type comments but we do skip the transform if the arity of - the function does not match the function-level comment. + This is a work in progress: the codemod only currently handles + single-annotation assigns, but it will preserve any type comments + that it does not consume. """ - # Finding the location of a type comment in a FunctionDef is difficult. - # - # As a result, if when visiting a FunctionDef header we are able to - # successfully extrct type information then we aggressively strip type - # comments until we reach the first statement in the body. - # - # Once we get there we have to stop, so that we don't unintentionally remove - # unprocessed type comments. - # - # This state handles tracking everything we need for this. 
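For the FunctionDef handling sketched in the notes above, the end-to-end effect (as exercised by `test_simple_function_type_comments` later in this diff) is that parameter and return types from the comment are applied and the comment itself is stripped, for example:

# Input:
#     def f1(x):  # type: (int) -> None
#         pass
# Output after the parameter and return annotations are applied and the
# function type comment is removed:
def f1(x: int) -> None:
    pass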
- function_type_info_stack: List[FunctionTypeInfo] - function_body_stack: List[cst.BaseSuite] - aggressively_strip_type_comments: bool - - @staticmethod - def add_args(arg_parser: argparse.ArgumentParser) -> None: - arg_parser.add_argument( - "--no-quote-annotations", - action="store_true", - help=( - "Add unquoted annotations. This leads to prettier code " - + "but possibly more errors if type comments are invalid." - ), - ) - - def __init__( - self, - context: CodemodContext, - no_quote_annotations: bool = False, - ) -> None: - if (sys.version_info.major, sys.version_info.minor) < (3, 9): - # The ast module did not get `unparse` until Python 3.9, - # or `type_comments` until Python 3.8 + def __init__(self, context: CodemodContext) -> None: + if (sys.version_info.major, sys.version_info.minor) < (3, 8): + # The ast module did not get `type_comments` until Python 3.8. + # In 3.6, we should error rather than silently running a nonsense codemod. # - # For earlier versions of python, raise early instead of failing - # later. It might be possible to use libcst parsing and the - # typed_ast library to support earlier python versions, but this is - # not a high priority. + # NOTE: it is possible to use the typed_ast library for 3.6, but + # this is not a high priority right now. See, e.g., the + # mypy.fastparse module. raise NotImplementedError( - "You are trying to run ConvertTypeComments, but libcst " - + "needs to be running with Python 3.9+ in order to " - + "do this. Try using Python 3.9+ to run your codemod. " - + "Note that the target code can be using Python 3.6+, " - + "it is only libcst that needs a new Python version." + "You are trying to run ConvertTypeComments on a " + + "python version without type comment support. Please " + + "try using python 3.8+ to run your codemod." ) super().__init__(context) - # flags used to control overall behavior - self.quote_annotations: bool = not no_quote_annotations - # state used to manage how we traverse nodes in various contexts - self.function_type_info_stack = [] - self.function_body_stack = [] - self.aggressively_strip_type_comments = False def _strip_TrailingWhitespace( self, node: cst.TrailingWhitespace, ) -> cst.TrailingWhitespace: - trailing_comment = _strip_type_comment(node.comment) - if trailing_comment is not None: - return node.with_changes(comment=trailing_comment) return node.with_changes( whitespace=cst.SimpleWhitespace( "" @@ -521,11 +93,27 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): comment=None, ) + def _convert_Assign( + self, + assign: cst.Assign, + type_comment: str, + ) -> Union[cst.AnnAssign, cst.Assign]: + if len(assign.targets) != 1: + # this case is not yet implemented, and we short-circuit + # it when handling SimpleStatementLine. + raise RuntimeError("Should not convert multi-target assign") + return cst.AnnAssign( + target=assign.targets[0].target, + annotation=_convert_annotation(raw=type_comment), + value=assign.value, + semicolon=assign.semicolon, + ) + def leave_SimpleStatementLine( self, original_node: cst.SimpleStatementLine, updated_node: cst.SimpleStatementLine, - ) -> Union[cst.SimpleStatementLine, cst.FlattenSentinel]: + ) -> cst.SimpleStatementLine: """ Convert any SimpleStatementLine containing an Assign with a type comment into one that uses a PEP 526 AnnAssign.
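A minimal usage sketch of the simplified command above, assuming Python 3.8+ and the module path shown in this diff; the input string and the expected output in the comment are illustrative:

import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.commands.convert_type_comments import ConvertTypeComments

# Run the codemod on a single-target assignment carrying a type comment.
codemod = ConvertTypeComments(CodemodContext())
module = cst.parse_module("y = 5  # type: int\n")
print(codemod.transform_module(module).code)  # expected: y: int = 5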
@@ -534,342 +122,25 @@ class ConvertTypeComments(VisitorBasedCodemodCommand): assign = updated_node.body[-1] if not isinstance(assign, cst.Assign): # only Assign matters return updated_node - annotation = _annotation_for_statement(original_node) - if annotation is None: + type_comment = _simple_statement_type_comment(original_node) + if type_comment is None: + return updated_node + if len(assign.targets) != 1: # multi-target Assign isn't used + return updated_node + target = assign.targets[0].target + if isinstance(target, cst.Tuple): # multi-element Assign isn't handled return updated_node # At this point have a single-line Assign with a type comment. # Convert it to an AnnAssign and strip the comment. - converted = convert_Assign( - node=assign, - annotation=annotation, - quote_annotations=self.quote_annotations, - ) - if isinstance(converted, _FailedToApplyAnnotation): - # We were unable to consume the type comment, so return the - # original code unchanged. - # TODO: allow stripping the invalid type comments via a flag - return updated_node - elif isinstance(converted, cst.AnnAssign): - # We were able to convert the Assign into an AnnAssign, so - # we can update the node. - return updated_node.with_changes( - body=[*updated_node.body[:-1], converted], - trailing_whitespace=self._strip_TrailingWhitespace( - updated_node.trailing_whitespace, - ), - ) - elif isinstance(converted, list): - # We need to inject two or more type declarations. - # - # In this case, we need to split across multiple lines, and - # this also means we'll spread any multi-statement lines out - # (multi-statement lines are PEP 8 violating anyway). - # - # We still preserve leading lines from before our transform. - new_statements = [ - *( - statement.with_changes( - semicolon=cst.MaybeSentinel.DEFAULT, - ) - for statement in updated_node.body[:-1] - ), - *converted, - ] - if len(new_statements) < 2: - raise RuntimeError("Unreachable code.") - return cst.FlattenSentinel( - [ - updated_node.with_changes( - body=[new_statements[0]], - trailing_whitespace=self._strip_TrailingWhitespace( - updated_node.trailing_whitespace, - ), - ), - *( - cst.SimpleStatementLine(body=[statement]) - for statement in new_statements[1:] - ), - ] - ) - else: - raise RuntimeError(f"Unhandled value {converted}") - - def leave_For( - self, - original_node: cst.For, - updated_node: cst.For, - ) -> Union[cst.For, cst.FlattenSentinel]: - """ - Convert a For with a type hint on the bound variable(s) to - use type declarations. - """ - # Type comments are only possible when the body is an indented - # block, and we need this refinement to work with the header, - # so we check and only then extract the type comment. - body = updated_node.body - if not isinstance(body, cst.IndentedBlock): - return updated_node - annotation = _annotation_for_statement(original_node) - if annotation is None: - return updated_node - # Zip up the type hint and the bindings. If we hit an arity - # error, abort. 
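The For handling above emits a bare type declaration ahead of the loop, as in `test_converting_for_statements` later in this diff; the stub `foo()` below is added only so the snippet stands alone:

# Input:
#     for x in foo():  # type: int
#         pass
# Output: the loop variable gets a standalone declaration and the type comment
# is stripped from the loop header.
def foo():
    return [1, 2, 3]

x: int
for x in foo():
    pass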
- try: - type_declarations = AnnotationSpreader.type_declaration_statements( - bindings=AnnotationSpreader.unpack_target(updated_node.target), - annotations=AnnotationSpreader.unpack_annotation(annotation), - leading_lines=updated_node.leading_lines, - quote_annotations=self.quote_annotations, - ) - except _ArityError: - return updated_node - # There is no arity error, so we can add the type delaration(s) - return cst.FlattenSentinel( - [ - *type_declarations, - updated_node.with_changes( - body=body.with_changes( - header=self._strip_TrailingWhitespace(body.header) - ), - leading_lines=[], - ), - ] - ) - - def leave_With( - self, - original_node: cst.With, - updated_node: cst.With, - ) -> Union[cst.With, cst.FlattenSentinel]: - """ - Convert a With with a type hint on the bound variable(s) to - use type declarations. - """ - # Type comments are only possible when the body is an indented - # block, and we need this refinement to work with the header, - # so we check and only then extract the type comment. - body = updated_node.body - if not isinstance(body, cst.IndentedBlock): - return updated_node - annotation = _annotation_for_statement(original_node) - if annotation is None: - return updated_node - # PEP 484 does not attempt to specify type comment semantics for - # multiple with bindings (there's more than one sensible way to - # do it), so we make no attempt to handle this - targets = [ - item.asname.name for item in updated_node.items if item.asname is not None - ] - if len(targets) != 1: - return updated_node - target = targets[0] - # Zip up the type hint and the bindings. If we hit an arity - # error, abort. - try: - type_declarations = AnnotationSpreader.type_declaration_statements( - bindings=AnnotationSpreader.unpack_target(target), - annotations=AnnotationSpreader.unpack_annotation(annotation), - leading_lines=updated_node.leading_lines, - quote_annotations=self.quote_annotations, - ) - except _ArityError: - return updated_node - # There is no arity error, so we can add the type delaration(s) - return cst.FlattenSentinel( - [ - *type_declarations, - updated_node.with_changes( - body=body.with_changes( - header=self._strip_TrailingWhitespace(body.header) - ), - leading_lines=[], - ), - ] - ) - - # Handle function definitions ------------------------- - - # **Implementation Notes** - # - # It is much harder to predict where exactly type comments will live - # in function definitions than in Assign / For / With. - # - # As a result, we use two different patterns: - # (A) we aggressively strip out type comments from whitespace between the - # start of a function define and the start of the body, whenever we were - # able to extract type information. This is done via mutable state and the - # usual visitor pattern. - # (B) we also manually reach down to the first statement inside of the - # function body and aggressively strip type comments from leading - # whitespaces - # - # PEP 484 underspecifies how to apply type comments to (non-static) - # methods - it would be possible to provide a type for `self`, or to omit - # it. So we accept either approach when interpreting type comments on - # non-static methods: the first argument an have a type provided or not. - - def _visit_FunctionDef( - self, - node: cst.FunctionDef, - is_method: bool, - ) -> None: - """ - Set up the data we need to handle function definitions: - - Parse the type comments. 
- - Store the resulting function type info on the stack, where it will - remain until we use it in `leave_FunctionDef` - - Set that we are aggressively stripping type comments, which will - remain true until we visit the body. - """ - function_type_info = FunctionTypeInfo.from_cst(node, is_method=is_method) - self.aggressively_strip_type_comments = not function_type_info.is_empty() - self.function_type_info_stack.append(function_type_info) - self.function_body_stack.append(node.body) - - @m.call_if_not_inside(m.ClassDef()) - @m.visit(m.FunctionDef()) - def visit_method( - self, - node: cst.FunctionDef, - ) -> None: - return self._visit_FunctionDef( - node=node, - is_method=False, - ) - - @m.call_if_inside(m.ClassDef()) - @m.visit(m.FunctionDef()) - def visit_function( - self, - node: cst.FunctionDef, - ) -> None: - return self._visit_FunctionDef( - node=node, - is_method=not any( - m.matches(d.decorator, m.Name("staticmethod")) for d in node.decorators - ), - ) - - def leave_TrailingWhitespace( - self, - original_node: cst.TrailingWhitespace, - updated_node: cst.TrailingWhitespace, - ) -> Union[cst.TrailingWhitespace]: - "Aggressively remove type comments when in header if we extracted types." - if self.aggressively_strip_type_comments and _is_type_comment( - updated_node.comment - ): - return cst.TrailingWhitespace() - else: - return updated_node - - def leave_EmptyLine( - self, - original_node: cst.EmptyLine, - updated_node: cst.EmptyLine, - ) -> Union[cst.EmptyLine, cst.RemovalSentinel]: - "Aggressively remove type comments when in header if we extracted types." - if self.aggressively_strip_type_comments and _is_type_comment( - updated_node.comment - ): - return cst.RemovalSentinel.REMOVE - else: - return updated_node - - def visit_FunctionDef_body( - self, - node: cst.FunctionDef, - ) -> None: - "Turn off aggressive type comment removal when we've left the header." - self.aggressively_strip_type_comments = False - - def leave_IndentedBlock( - self, - original_node: cst.IndentedBlock, - updated_node: cst.IndentedBlock, - ) -> cst.IndentedBlock: - "When appropriate, strip function type comment from the function body." - # abort unless this is the body of a function we are transforming - if len(self.function_body_stack) == 0: - return updated_node - if original_node is not self.function_body_stack[-1]: - return updated_node - if self.function_type_info_stack[-1].is_empty(): - return updated_node - # The comment will be in the body header if it was on the same line - # as the colon. - if _is_type_comment(updated_node.header.comment): - updated_node = updated_node.with_changes( - header=cst.TrailingWhitespace(), - ) - # The comment will be in a leading line of the first body statement - # if it was on the first line after the colon. - first_statement = updated_node.body[0] - if not hasattr(first_statement, "leading_lines"): - return updated_node return updated_node.with_changes( body=[ - first_statement.with_changes( - leading_lines=[ - line - # pyre-ignore[16]: we refined via `hasattr` - for line in first_statement.leading_lines - if not _is_type_comment(line.comment) - ] + *updated_node.body[:-1], + self._convert_Assign( + assign=assign, + type_comment=type_comment, ), - *updated_node.body[1:], - ] + ], + trailing_whitespace=self._strip_TrailingWhitespace( + updated_node.trailing_whitespace + ), ) - - # Methods for adding type annotations ---- - # - # By the time we get here, all type comments should already be stripped. 
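The precedence between existing annotations, inline parameter type comments, and the function-level comment is exercised by `test_prioritization_order_for_type_application` later in this diff; a sketch of the resulting output:

# Input:
#     def f(
#         x: int,  # type: str
#         y,       # type: str
#         z
#     ):  # type: (float, float, float) -> None
#         pass
# Output: the existing annotation wins for x, the inline comment for y, and
# the function-level comment for z.
def f(
    x: int,
    y: str,
    z: float
) -> None:
    pass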
- - def leave_Param( - self, - original_node: cst.Param, - updated_node: cst.Param, - ) -> cst.Param: - # ignore type comments if there's already an annotation - if updated_node.annotation is not None: - return updated_node - # find out if there's a type comment and apply it if so - function_type_info = self.function_type_info_stack[-1] - raw_annotation = function_type_info.arguments.get(updated_node.name.value) - if raw_annotation is not None: - return updated_node.with_changes( - annotation=_convert_annotation( - raw=raw_annotation, - quote_annotations=self.quote_annotations, - ) - ) - else: - return updated_node - - def leave_FunctionDef( - self, - original_node: cst.FunctionDef, - updated_node: cst.FunctionDef, - ) -> cst.FunctionDef: - self.function_body_stack.pop() - function_type_info = self.function_type_info_stack.pop() - if updated_node.returns is None and function_type_info.returns is not None: - return updated_node.with_changes( - returns=_convert_annotation( - raw=function_type_info.returns, - quote_annotations=self.quote_annotations, - ) - ) - else: - return updated_node - - def visit_Lambda( - self, - node: cst.Lambda, - ) -> bool: - """ - Disable traversing under lambdas. They don't have any statements - nested inside them so there's no need, and they do have Params which - we don't want to transform. - """ - return False diff --git a/libcst/codemod/commands/convert_union_to_or.py b/libcst/codemod/commands/convert_union_to_or.py deleted file mode 100644 index 96a64314..00000000 --- a/libcst/codemod/commands/convert_union_to_or.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -# pyre-strict - -import libcst as cst -from libcst.codemod import VisitorBasedCodemodCommand -from libcst.codemod.visitors import RemoveImportsVisitor -from libcst.metadata import QualifiedName, QualifiedNameProvider, QualifiedNameSource - - -class ConvertUnionToOrCommand(VisitorBasedCodemodCommand): - DESCRIPTION: str = "Convert `Union[A, B]` to `A | B` in Python 3.10+" - - METADATA_DEPENDENCIES = (QualifiedNameProvider,) - - def leave_Subscript( - self, original_node: cst.Subscript, updated_node: cst.Subscript - ) -> cst.BaseExpression: - """ - Given a subscript, check if it's a Union - if so, either flatten the members - into a nested BitOr (if multiple members) or unwrap the type (if only one member). 
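The nested-BitOr construction described in the docstring above can be sketched directly with libcst; the hard-coded `Name` nodes stand in for the subscript members that `leave_Subscript` extracts:

import libcst as cst

# Fold a list of member types into a left-nested `A | B | C` expression,
# the same shape leave_Subscript builds for Union[int, str, float].
types = [cst.Name("int"), cst.Name("str"), cst.Name("float")]
replacement = cst.BinaryOperation(left=types[0], right=types[1], operator=cst.BitOr())
for type_ in types[2:]:
    replacement = cst.BinaryOperation(left=replacement, right=type_, operator=cst.BitOr())
print(cst.Module(body=[]).code_for_node(replacement))  # int | str | float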
- """ - if not QualifiedNameProvider.has_name( - self, - original_node, - QualifiedName(name="typing.Union", source=QualifiedNameSource.IMPORT), - ): - return updated_node - types = [ - cst.ensure_type( - cst.ensure_type(s, cst.SubscriptElement).slice, cst.Index - ).value - for s in updated_node.slice - ] - if len(types) == 1: - return types[0] - else: - replacement = cst.BinaryOperation( - left=types[0], right=types[1], operator=cst.BitOr() - ) - for type_ in types[2:]: - replacement = cst.BinaryOperation( - left=replacement, right=type_, operator=cst.BitOr() - ) - return replacement - - def leave_Module( - self, original_node: cst.Module, updated_node: cst.Module - ) -> cst.Module: - RemoveImportsVisitor.remove_unused_import( - self.context, module="typing", obj="Union" - ) - return updated_node diff --git a/libcst/codemod/commands/ensure_import_present.py b/libcst/codemod/commands/ensure_import_present.py index 44dda822..a97fddfd 100644 --- a/libcst/codemod/commands/ensure_import_present.py +++ b/libcst/codemod/commands/ensure_import_present.py @@ -11,6 +11,7 @@ from libcst.codemod.visitors import AddImportsVisitor class EnsureImportPresentCommand(MagicArgsCodemodCommand): + DESCRIPTION: str = ( "Given a module and possibly an entity in that module, add an import " + "as long as one does not already exist." diff --git a/libcst/codemod/commands/fix_pyre_directives.py b/libcst/codemod/commands/fix_pyre_directives.py index a9779d0f..c3ab41b7 100644 --- a/libcst/codemod/commands/fix_pyre_directives.py +++ b/libcst/codemod/commands/fix_pyre_directives.py @@ -7,7 +7,6 @@ from typing import Dict, Sequence, Union import libcst import libcst.matchers as m -from libcst import CSTLogicError from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand from libcst.helpers import insert_header_comments @@ -30,12 +29,12 @@ class FixPyreDirectivesCommand(VisitorBasedCodemodCommand): def visit_Module_header(self, node: libcst.Module) -> None: if self.in_module_header: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") self.in_module_header = True def leave_Module_header(self, node: libcst.Module) -> None: if not self.in_module_header: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") self.in_module_header = False def leave_EmptyLine( diff --git a/libcst/codemod/commands/fix_variadic_callable.py b/libcst/codemod/commands/fix_variadic_callable.py deleted file mode 100644 index 85cb0aa0..00000000 --- a/libcst/codemod/commands/fix_variadic_callable.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
-# -# pyre-strict - -import libcst as cst -import libcst.matchers as m -from libcst.codemod import VisitorBasedCodemodCommand -from libcst.metadata import QualifiedName, QualifiedNameProvider, QualifiedNameSource - - -class FixVariadicCallableCommmand(VisitorBasedCodemodCommand): - DESCRIPTION: str = ( - "Fix incorrect variadic callable type annotations from `Callable[[...], T]` to `Callable[..., T]``" - ) - - METADATA_DEPENDENCIES = (QualifiedNameProvider,) - - def leave_Subscript( - self, original_node: cst.Subscript, updated_node: cst.Subscript - ) -> cst.BaseExpression: - if QualifiedNameProvider.has_name( - self, - original_node, - QualifiedName(name="typing.Callable", source=QualifiedNameSource.IMPORT), - ): - node_matches = len(updated_node.slice) == 2 and m.matches( - updated_node.slice[0], - m.SubscriptElement( - slice=m.Index(value=m.List(elements=[m.Element(m.Ellipsis())])) - ), - ) - - if node_matches: - slices = list(updated_node.slice) - slices[0] = cst.SubscriptElement(cst.Index(cst.Ellipsis())) - return updated_node.with_changes(slice=slices) - return updated_node diff --git a/libcst/codemod/commands/remove_unused_imports.py b/libcst/codemod/commands/remove_unused_imports.py index 2e6beafa..e9938d8a 100644 --- a/libcst/codemod/commands/remove_unused_imports.py +++ b/libcst/codemod/commands/remove_unused_imports.py @@ -9,7 +9,7 @@ from typing import Set, Tuple, Union from libcst import Import, ImportFrom, ImportStar, Module from libcst.codemod import CodemodContext, VisitorBasedCodemodCommand from libcst.codemod.visitors import GatherCommentsVisitor, RemoveImportsVisitor -from libcst.helpers import get_absolute_module_from_package_for_import +from libcst.helpers import get_absolute_module_for_import from libcst.metadata import PositionProvider, ProviderT DEFAULT_SUPPRESS_COMMENT_REGEX = ( @@ -74,8 +74,8 @@ class RemoveUnusedImportsCommand(VisitorBasedCodemodCommand): asname=alias.evaluated_alias, ) else: - module_name = get_absolute_module_from_package_for_import( - self.context.full_package_name, node + module_name = get_absolute_module_for_import( + self.context.full_module_name, node ) if module_name is None: raise ValueError( diff --git a/libcst/codemod/commands/rename.py b/libcst/codemod/commands/rename.py index f3accdcd..4b525ab3 100644 --- a/libcst/codemod/commands/rename.py +++ b/libcst/codemod/commands/rename.py @@ -15,7 +15,7 @@ from libcst.metadata import QualifiedNameProvider def leave_import_decorator( - method: Callable[..., Union[cst.Import, cst.ImportFrom]], + method: Callable[..., Union[cst.Import, cst.ImportFrom]] ) -> Callable[..., Union[cst.Import, cst.ImportFrom]]: # We want to record any 'as name' that is relevant but only after we leave the corresponding Import/ImportFrom node since # we don't want the 'as name' to interfere with children 'Name' and 'Attribute' nodes. 
@@ -92,43 +92,14 @@ class RenameCommand(VisitorBasedCodemodCommand): self.old_module: str = old_module self.old_mod_or_obj: str = old_mod_or_obj - @property - def as_name(self) -> Optional[Tuple[str, str]]: - if "as_name" not in self.context.scratch: - self.context.scratch["as_name"] = None - return self.context.scratch["as_name"] + self.as_name: Optional[Tuple[str, str]] = None - @as_name.setter - def as_name(self, value: Optional[Tuple[str, str]]) -> None: - self.context.scratch["as_name"] = value - - @property - def scheduled_removals( - self, - ) -> Set[Union[cst.CSTNode, Tuple[str, Optional[str], Optional[str]]]]: - """A set of nodes that have been renamed to help with the cleanup of now potentially unused - imports, during import cleanup in `leave_Module`. Can also contain tuples that can be passed - directly to RemoveImportsVisitor.remove_unused_import().""" - if "scheduled_removals" not in self.context.scratch: - self.context.scratch["scheduled_removals"] = set() - return self.context.scratch["scheduled_removals"] - - @scheduled_removals.setter - def scheduled_removals( - self, value: Set[Union[cst.CSTNode, Tuple[str, Optional[str], Optional[str]]]] - ) -> None: - self.context.scratch["scheduled_removals"] = value - - @property - def bypass_import(self) -> bool: - """A flag to indicate that an import has been renamed while inside an `Import` or `ImportFrom` node.""" - if "bypass_import" not in self.context.scratch: - self.context.scratch["bypass_import"] = False - return self.context.scratch["bypass_import"] - - @bypass_import.setter - def bypass_import(self, value: bool) -> None: - self.context.scratch["bypass_import"] = value + # A set of nodes that have been renamed to help with the cleanup of now potentially unused + # imports, during import cleanup in `leave_Module`. + self.scheduled_removals: Set[cst.CSTNode] = set() + # If an import has been renamed while inside an `Import` or `ImportFrom` node, we want to flag + # this so that we do not end up with two of the same import. + self.bypass_import = False def visit_Import(self, node: cst.Import) -> None: for import_alias in node.names: @@ -147,42 +118,40 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> cst.Import: new_names = [] for import_alias in updated_node.names: - # We keep the original import_alias here in case it's used by other symbols. - # It will be removed later in RemoveImportsVisitor if it's unused. 
- new_names.append(import_alias) import_alias_name = import_alias.name import_alias_full_name = get_full_name_for_node(import_alias_name) if import_alias_full_name is None: - raise ValueError("Could not parse full name for ImportAlias.name node.") + raise Exception("Could not parse full name for ImportAlias.name node.") - if self.old_name.startswith(import_alias_full_name + "."): - replacement_module = self.gen_replacement_module(import_alias_full_name) - if not replacement_module: - # here import_alias_full_name isn't an exact match for old_name - # don't add an import here, it will be handled either in more - # specific import aliases or at the very end - continue - self.bypass_import = True - if replacement_module != import_alias_full_name: - self.scheduled_removals.add(original_node) - new_name_node: Union[cst.Attribute, cst.Name] = ( - self.gen_name_or_attr_node(replacement_module) - ) - new_names.append(cst.ImportAlias(name=new_name_node)) - elif ( - import_alias_full_name == self.new_name - and import_alias.asname is not None + if isinstance(import_alias_name, cst.Name) and self.old_name.startswith( + import_alias_full_name + "." ): - self.bypass_import = True - # Add removal tuple instead of calling directly - self.scheduled_removals.add( - ( - import_alias.evaluated_name, - None, - import_alias.evaluated_alias, + # Might, be in use elsewhere in the code, so schedule a potential removal, and add another alias. + new_names.append(import_alias) + self.scheduled_removals.add(original_node) + new_names.append( + cst.ImportAlias( + name=cst.Name( + value=self.gen_replacement_module(import_alias_full_name) + ) ) ) - new_names.append(import_alias.with_changes(asname=None)) + self.bypass_import = True + elif isinstance( + import_alias_name, cst.Attribute + ) and self.old_name.startswith(import_alias_full_name + "."): + # Same idea as above. + new_names.append(import_alias) + self.scheduled_removals.add(original_node) + new_name_node: Union[ + cst.Attribute, cst.Name + ] = self.gen_name_or_attr_node( + self.gen_replacement_module(import_alias_full_name) + ) + new_names.append(cst.ImportAlias(name=new_name_node)) + self.bypass_import = True + else: + new_names.append(import_alias) return updated_node.with_changes(names=new_names) @@ -214,12 +183,13 @@ class RenameCommand(VisitorBasedCodemodCommand): return updated_node else: - new_names: list[cst.ImportAlias] = [] + new_names = [] for import_alias in names: alias_name = get_full_name_for_node(import_alias.name) if alias_name is not None: qual_name = f"{imported_module_name}.{alias_name}" if self.old_name == qual_name: + replacement_module = self.gen_replacement_module( imported_module_name ) @@ -231,16 +201,16 @@ class RenameCommand(VisitorBasedCodemodCommand): self.scheduled_removals.add(original_node) continue - new_import_alias_name: Union[cst.Attribute, cst.Name] = ( - self.gen_name_or_attr_node(replacement_obj) - ) + new_import_alias_name: Union[ + cst.Attribute, cst.Name + ] = self.gen_name_or_attr_node(replacement_obj) # Rename on the spot only if this is the only imported name under the module. if len(names) == 1: - updated_node = updated_node.with_changes( + self.bypass_import = True + return updated_node.with_changes( module=cst.parse_expression(replacement_module), + names=(cst.ImportAlias(name=new_import_alias_name),), ) - self.scheduled_removals.add(updated_node) - new_names.append(import_alias) # Or if the module name is to stay the same. 
elif replacement_module == imported_module_name: self.bypass_import = True @@ -252,10 +222,6 @@ class RenameCommand(VisitorBasedCodemodCommand): # This import might be in use elsewhere in the code, so schedule a potential removal. self.scheduled_removals.add(original_node) new_names.append(import_alias) - if isinstance(new_names[-1].comma, cst.Comma) and updated_node.rpar is None: - new_names[-1] = new_names[-1].with_changes( - comma=cst.MaybeSentinel.DEFAULT - ) return updated_node.with_changes(names=new_names) return updated_node @@ -286,30 +252,29 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> Union[cst.Name, cst.Attribute]: full_name_for_node = get_full_name_for_node(original_node) if full_name_for_node is None: - raise ValueError("Could not parse full name for Attribute node.") + raise Exception("Could not parse full name for Attribute node.") full_replacement_name = self.gen_replacement(full_name_for_node) # If a node has no associated QualifiedName, we are still inside an import statement. inside_import_statement: bool = not self.get_metadata( QualifiedNameProvider, original_node, set() ) - if QualifiedNameProvider.has_name( - self, - original_node, - self.old_name, - ) or (inside_import_statement and full_replacement_name == self.new_name): + if ( + QualifiedNameProvider.has_name( + self, + original_node, + self.old_name, + ) + or (inside_import_statement and full_replacement_name == self.new_name) + ): new_value, new_attr = self.new_module, self.new_mod_or_obj if not inside_import_statement: self.scheduled_removals.add(original_node.value) if full_replacement_name == self.new_name: - value = cst.parse_expression(new_value) - if new_attr: - return updated_node.with_changes( - value=value, - attr=cst.Name(value=new_attr.rstrip(".")), - ) - assert isinstance(value, (cst.Name, cst.Attribute)) - return value + return updated_node.with_changes( + value=cst.parse_expression(new_value), + attr=cst.Name(value=new_attr.rstrip(".")), + ) return self.gen_name_or_attr_node(new_attr) @@ -318,17 +283,14 @@ class RenameCommand(VisitorBasedCodemodCommand): def leave_Module( self, original_node: cst.Module, updated_node: cst.Module ) -> cst.Module: - for removal in self.scheduled_removals: - if isinstance(removal, tuple): - RemoveImportsVisitor.remove_unused_import( - self.context, removal[0], removal[1], removal[2] - ) - else: - RemoveImportsVisitor.remove_unused_import_by_node(self.context, removal) + for removal_node in self.scheduled_removals: + RemoveImportsVisitor.remove_unused_import_by_node( + self.context, removal_node + ) # If bypass_import is False, we know that no import statements were directly renamed, and the fact # that we have any `self.scheduled_removals` tells us we encountered a matching `old_name` in the code. if not self.bypass_import and self.scheduled_removals: - if self.new_module and self.new_module != "builtins": + if self.new_module: new_obj: Optional[str] = ( self.new_mod_or_obj.split(".")[0] if self.new_mod_or_obj else None ) @@ -347,14 +309,10 @@ class RenameCommand(VisitorBasedCodemodCommand): module_as_name[0] + ".", module_as_name[1] + ".", 1 ) - if self.old_module and original_name == self.old_mod_or_obj: + if original_name == self.old_mod_or_obj: return self.new_mod_or_obj - elif original_name == self.old_name: - return ( - self.new_mod_or_obj - if (not self.bypass_import and self.new_mod_or_obj) - else self.new_name - ) + elif original_name == ".".join([self.old_module, self.old_mod_or_obj]): + return self.new_name elif original_name.endswith("." 
+ self.old_mod_or_obj): return self.new_mod_or_obj else: @@ -368,7 +326,7 @@ class RenameCommand(VisitorBasedCodemodCommand): ) -> Union[cst.Attribute, cst.Name]: name_or_attr_node: cst.BaseExpression = cst.parse_expression(dotted_expression) if not isinstance(name_or_attr_node, (cst.Name, cst.Attribute)): - raise ValueError( + raise Exception( "`parse_expression()` on dotted path returned non-Attribute-or-Name." ) return name_or_attr_node diff --git a/libcst/codemod/commands/rename_typing_generic_aliases.py b/libcst/codemod/commands/rename_typing_generic_aliases.py deleted file mode 100644 index d6906fe9..00000000 --- a/libcst/codemod/commands/rename_typing_generic_aliases.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -# pyre-strict -from functools import partial -from typing import cast, Generator - -from libcst.codemod import Codemod, MagicArgsCodemodCommand -from libcst.codemod.commands.rename import RenameCommand - - -class RenameTypingGenericAliases(MagicArgsCodemodCommand): - DESCRIPTION: str = ( - "Rename typing module aliases of builtin generics in Python 3.9+, for example: `typing.List` -> `list`" - ) - - MAPPING: dict[str, str] = { - "typing.List": "builtins.list", - "typing.Tuple": "builtins.tuple", - "typing.Dict": "builtins.dict", - "typing.FrozenSet": "builtins.frozenset", - "typing.Set": "builtins.set", - "typing.Type": "builtins.type", - } - - def get_transforms(self) -> Generator[type[Codemod], None, None]: - for from_type, to_type in self.MAPPING.items(): - yield cast( - type[Codemod], - partial( - RenameCommand, - old_name=from_type, - new_name=to_type, - ), - ) diff --git a/libcst/codemod/commands/strip_strings_from_types.py b/libcst/codemod/commands/strip_strings_from_types.py index 3f0894cd..6564d041 100644 --- a/libcst/codemod/commands/strip_strings_from_types.py +++ b/libcst/codemod/commands/strip_strings_from_types.py @@ -14,6 +14,7 @@ from libcst.metadata import QualifiedNameProvider class StripStringsCommand(VisitorBasedCodemodCommand): + DESCRIPTION: str = ( "Converts string type annotations to 3.7-compatible forward references." ) @@ -43,12 +44,8 @@ class StripStringsCommand(VisitorBasedCodemodCommand): self, original_node: libcst.SimpleString, updated_node: libcst.SimpleString ) -> Union[libcst.SimpleString, libcst.BaseExpression]: AddImportsVisitor.add_needed_import(self.context, "__future__", "annotations") - evaluated_value = updated_node.evaluated_value # Just use LibCST to evaluate the expression itself, and insert that as the # annotation. 
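Taken together with the `AddImportsVisitor.add_needed_import(self.context, "__future__", "annotations")` call above, the intended effect of StripStringsCommand is roughly the following; this is a sketch, and the exact output formatting is inferred from the code rather than shown in this diff:

# Input:
#     x: "List[int]" = []
# Expected output: the string annotation is re-parsed into a real expression,
# and the added __future__ import keeps it unevaluated at runtime.
from __future__ import annotations

x: List[int] = []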
- if isinstance(evaluated_value, str): - return parse_expression( - evaluated_value, config=self.module.config_for_parsing - ) - else: - return updated_node + return parse_expression( + updated_node.evaluated_value, config=self.module.config_for_parsing + ) diff --git a/libcst/codemod/commands/tests/test_add_pyre_directive.py b/libcst/codemod/commands/tests/test_add_pyre_directive.py index 37e6f2f9..a8e7e45a 100644 --- a/libcst/codemod/commands/tests/test_add_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_add_pyre_directive.py @@ -8,6 +8,7 @@ from libcst.codemod.commands.add_pyre_directive import AddPyreUnsafeCommand class TestAddPyreUnsafeCommand(CodemodTest): + TRANSFORM = AddPyreUnsafeCommand def test_add_to_file(self) -> None: diff --git a/libcst/codemod/commands/tests/test_add_trailing_commas.py b/libcst/codemod/commands/tests/test_add_trailing_commas.py deleted file mode 100644 index 1df31b69..00000000 --- a/libcst/codemod/commands/tests/test_add_trailing_commas.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# - -from libcst.codemod import CodemodTest -from libcst.codemod.commands.add_trailing_commas import AddTrailingCommas - - -class AddTrailingCommasTest(CodemodTest): - TRANSFORM = AddTrailingCommas - - def test_transform_defines(self) -> None: - before = """ - def f(x, y): - pass - - """ - after = """ - def f(x, y,): - pass - """ - self.assertCodemod(before, after) - - def test_skip_transforming_defines(self) -> None: - before = """ - # skip defines with no params. - def f0(): - pass - - # skip defines with a single param named `self`. - class Foo: - def __init__(self): - pass - """ - after = before - self.assertCodemod(before, after) - - def test_transform_calls(self) -> None: - before = """ - f(a, b, c) - - g(x=a, y=b, z=c) - """ - after = """ - f(a, b, c,) - - g(x=a, y=b, z=c,) - """ - self.assertCodemod(before, after) - - def test_skip_transforming_calls(self) -> None: - before = """ - # skip empty calls - f() - - # skip calls with one argument - g(a) - g(x=a) - """ - after = before - self.assertCodemod(before, after) - - def test_using_yapf_presets(self) -> None: - before = """ - def f(x): # skip single parameters for yapf - pass - - def g(x, y): - pass - """ - after = """ - def f(x): # skip single parameters for yapf - pass - - def g(x, y,): - pass - """ - self.assertCodemod(before, after, formatter="yapf") - - def test_using_custom_presets(self) -> None: - before = """ - def f(x, y, z): - pass - - f(5, 6, 7) - """ - after = before - self.assertCodemod(before, after, parameter_count=4, argument_count=4) diff --git a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py index 1a10303b..b2209cdc 100644 --- a/libcst/codemod/commands/tests/test_convert_format_to_fstring.py +++ b/libcst/codemod/commands/tests/test_convert_format_to_fstring.py @@ -8,6 +8,7 @@ from libcst.codemod.commands.convert_format_to_fstring import ConvertFormatStrin class ConvertFormatStringCommandTest(CodemodTest): + TRANSFORM = ConvertFormatStringCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py index 8e0b314d..866f03c4 100644 --- a/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py +++ 
b/libcst/codemod/commands/tests/test_convert_namedtuple_to_dataclass.py @@ -10,6 +10,7 @@ from libcst.codemod.commands.convert_namedtuple_to_dataclass import ( class ConvertNamedTupleToDataclassCommandTest(CodemodTest): + TRANSFORM = ConvertNamedTupleToDataclassCommand def test_no_change(self) -> None: diff --git a/libcst/codemod/commands/tests/test_convert_type_comments.py b/libcst/codemod/commands/tests/test_convert_type_comments.py index 2c5917d1..5e7f96ea 100644 --- a/libcst/codemod/commands/tests/test_convert_type_comments.py +++ b/libcst/codemod/commands/tests/test_convert_type_comments.py @@ -4,37 +4,28 @@ # LICENSE file in the root directory of this source tree. import sys -from typing import Any from libcst.codemod import CodemodTest from libcst.codemod.commands.convert_type_comments import ConvertTypeComments -class TestConvertTypeCommentsBase(CodemodTest): - maxDiff = 1500 +class TestConvertTypeComments(CodemodTest): + + maxDiff = 1000 TRANSFORM = ConvertTypeComments - def assertCodemod39Plus(self, before: str, after: str, **kwargs: Any) -> None: + def assertCodemod38Plus(self, before: str, after: str) -> None: """ - Assert that the codemod works on Python 3.9+, and that we raise - a NotImplementedError on other Python versions. + Assert that the codemod works on Python 3.8+, and that we raise + a NotImplementedError on other python versions. """ - if (sys.version_info.major, sys.version_info.minor) < (3, 9): + if (sys.version_info.major, sys.version_info.minor) < (3, 8): with self.assertRaises(NotImplementedError): - super().assertCodemod(before, after, **kwargs) + super().assertCodemod(before, after) else: - super().assertCodemod(before, after, **kwargs) + super().assertCodemod(before, after) - -class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): - def test_preserves_trailing_comment(self) -> None: - before = """ - y = 5 # type: int # foo - """ - after = """ - y: int = 5 # foo - """ - self.assertCodemod39Plus(before, after) + # Tests converting assignment type comments ----------------- def test_convert_assignments(self) -> None: before = """ @@ -45,13 +36,15 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): y: int = 5 z: "typing.Tuple[str, int]" = ('this', 7) """ - self.assertCodemod39Plus(before, after) + self.assertCodemod38Plus(before, after) def test_convert_assignments_in_context(self) -> None: """ Also verify that our matching works regardless of spacing """ before = """ + bar(); baz = 12 # type: int + def foo(): z = ('this', 7) # type: typing.Tuple[str, int] @@ -61,6 +54,8 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): self.attr1 = True # type: bool """ after = """ + bar(); baz: int = 12 + def foo(): z: "typing.Tuple[str, int]" = ('this', 7) @@ -69,136 +64,7 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): def __init__(self): self.attr1: bool = True """ - self.assertCodemod39Plus(before, after) - - def test_multiple_elements_in_assign_lhs(self) -> None: - before = """ - x, y = [], [] # type: List[int], List[str] - z, w = [], [] # type: (List[int], List[str]) - - a, b, *c = range(5) # type: float, float, List[float] - - d, (e1, e2) = foo() # type: float, (int, str) - """ - after = """ - x: "List[int]" - y: "List[str]" - x, y = [], [] - z: "List[int]" - w: "List[str]" - z, w = [], [] - - a: float - b: float - c: "List[float]" - a, b, *c = range(5) - - d: float - e1: int - e2: str - d, (e1, e2) = foo() - """ - self.assertCodemod39Plus(before, after) - - def 
test_multiple_assignments(self) -> None: - before = """ - x = y = z = 15 # type: int - - a, b = c, d = 'this', 'that' # type: (str, str) - """ - after = """ - x: int - y: int - z: int - x = y = z = 15 - - a: str - b: str - c: str - d: str - a, b = c, d = 'this', 'that' - """ - self.assertCodemod39Plus(before, after) - - def test_semicolons_with_assignment(self) -> None: - """ - When we convert an Assign to an AnnAssign, preserve - semicolons. But if we have to add separate type declarations, - expand them. - """ - before = """ - foo(); x = 12 # type: int - - bar(); y, z = baz() # type: int, str - """ - after = """ - foo(); x: int = 12 - - bar() - y: int - z: str - y, z = baz() - """ - self.assertCodemod39Plus(before, after) - - def test_converting_for_statements(self) -> None: - before = """ - # simple binding - for x in foo(): # type: int - pass - - # nested binding - for (a, (b, c)) in bar(): # type: int, (str, float) - pass - """ - after = """ - # simple binding - x: int - for x in foo(): - pass - - # nested binding - a: int - b: str - c: float - for (a, (b, c)) in bar(): - pass - """ - self.assertCodemod39Plus(before, after) - - def test_converting_with_statements(self) -> None: - before = """ - # simple binding - with open('file') as f: # type: File - pass - - # simple binding, with extra items - with foo(), open('file') as f, bar(): # type: File - pass - - # nested binding - with bar() as (a, (b, c)): # type: int, (str, float) - pass - """ - after = """ - # simple binding - f: "File" - with open('file') as f: - pass - - # simple binding, with extra items - f: "File" - with foo(), open('file') as f, bar(): - pass - - # nested binding - a: int - b: str - c: float - with bar() as (a, (b, c)): - pass - """ - self.assertCodemod39Plus(before, after) + self.assertCodemod38Plus(before, after) def test_no_change_when_type_comment_unused(self) -> None: before = """ @@ -208,274 +74,15 @@ class TestConvertTypeComments_AssignForWith(TestConvertTypeCommentsBase): # a commented type comment (per PEP 484) is not a type comment z = 15 # # type: int - # ignore unparseable type comments - var = "var" # type: this is not a python type! - # a type comment in an illegal location won't be used print("hello") # type: None - # These examples are not PEP 484 compliant, and result in arity errors + # We currently cannot handle multiple-target assigns. + # Make sure we won't strip those type comments. + x, y, z = [], [], [] # type: List[int], List[int], List[str] + x, y, z = [], [], [] # type: (List[int], List[int], List[str]) + a, b, *c = range(5) # type: float, float, List[float] a, b = 1, 2 # type: Tuple[int, int] - w = foo() # type: float, str - - # Multiple assigns with mismatched LHS arities always result in arity - # errors, and we only codemod if each target is error-free - v = v0, v1 = (3, 5) # type: int, int - - # Ignore for statements with arity mismatches - for x in []: # type: int, int - pass - - # Ignore with statements with arity mismatches - with open('file') as (f0, f1): # type: File - pass - - # Ignore with statements that have multiple item bindings - with open('file') as f0, open('file') as f1: # type: File - pass - - # In cases where the entire statement cannot successfully be parsed - # with `type_comments=True` because of an invalid type comment, we - # skip it. Here, annotating the inner `pass` is illegal. 
- for x in []: # type: int - pass # type: None """ after = before - self.assertCodemod39Plus(before, after) - - -class TestConvertTypeComments_FunctionDef(TestConvertTypeCommentsBase): - """ - Some notes on our testing strategy: In order to avoid a combinatorial - explosion in test cases, we leverage some knowledge about the - implementation. - - Here are the key ideas that allow us to write fewer cases: - - The logic for generating annotations is the same for all annotations, - and is well-covered by TestConvertTypeComments_AssignForWith, so we - can stick to just simple builtin types. - - The application of types is independent of where they came from. - - Type comment removal is indepenent of type application, other - than in the case where we give up entirely. - - The rules for which type gets used (existing annotation, inline comment, - or func type comment) is independent of the location of a parameter. - """ - - def test_simple_function_type_comments(self) -> None: - before = """ - def f0(x): # type: (...) -> None - pass - - def f1(x): # type: (int) -> None - pass - - def f2(x, /, y = 'y', *, z = 1.5): - # type: (int, str, float) -> None - pass - - def f3(x, *args, y, **kwargs): - # type: (str, int, str, float) -> None - pass - - def f4(x, *args, **kwargs): - # type: (str, *int, **float) -> None - pass - """ - after = """ - def f0(x) -> None: - pass - - def f1(x: int) -> None: - pass - - def f2(x: int, /, y: str = 'y', *, z: float = 1.5) -> None: - pass - - def f3(x: str, *args: int, y: str, **kwargs: float) -> None: - pass - - def f4(x: str, *args: int, **kwargs: float) -> None: - pass - """ - self.assertCodemod39Plus(before, after) - - def test_prioritization_order_for_type_application(self) -> None: - before = """ - def f( - x: int, # type: str - y, # type: str - z - ): # type: (float, float, float) -> None - pass - """ - after = """ - def f( - x: int, - y: str, - z: float - ) -> None: - pass - """ - self.assertCodemod39Plus(before, after) - - def test_inlined_function_type_comments(self) -> None: - before = """ - def f( - x, # not-a-type-comment - # also-not-a-type-comment - y = 42, # type: int - *args, - # type: technically-another-line-is-legal :o - z, - **kwargs, # type: str - ): # not-a-type-comment - # also-not-a-type-comment - pass - """ - after = """ - def f( - x, # not-a-type-comment - # also-not-a-type-comment - y: int = 42, - *args: "technically-another-line-is-legal :o", - z, - **kwargs: str, - ): # not-a-type-comment - # also-not-a-type-comment - pass - """ - self.assertCodemod39Plus(before, after) - - def test_method_transforms(self) -> None: - before = """ - class A: - - def __init__(self, thing): # type: (str) -> None - self.thing = thing - - @classmethod - def make(cls): # type: () -> A - return cls("thing") - - @staticmethod - def f(x, y): # type: (object, object) -> None - pass - - def method0( - self, - other_thing, - ): # type: (str) -> bool - return self.thing == other_thing - - def method1( - self, # type: A - other_thing, # type: str - ): # type: (int) -> bool - return self.thing == other_thing - - def method2( - self, - other_thing, - ): # type: (A, str) -> bool - return self.thing == other_thing - """ - after = """ - class A: - - def __init__(self, thing: str) -> None: - self.thing = thing - - @classmethod - def make(cls) -> "A": - return cls("thing") - - @staticmethod - def f(x: object, y: object) -> None: - pass - - def method0( - self, - other_thing: str, - ) -> bool: - return self.thing == other_thing - - def method1( - self: "A", - other_thing: str, 
- ) -> bool: - return self.thing == other_thing - - def method2( - self: "A", - other_thing: str, - ) -> bool: - return self.thing == other_thing - """ - self.assertCodemod39Plus(before, after) - - def test_no_change_if_function_type_comments_unused(self) -> None: - before = """ - # arity error in arguments - def f(x, y): # type: (int) -> float - pass - - # unparseable function type - def f(x, y): # type: this is not a type! - pass - - # In cases where the entire statement cannot successfully be parsed - # with `type_comments=True` because of an invalid type comment, we - # skip it. Here, annotating the inner `pass` is illegal. - def f(x, y): # type: (int, int) -> None - pass # type: None - """ - after = before - self.assertCodemod39Plus(before, after) - - def test_do_not_traverse_lambda_Param(self) -> None: - """ - The Param node can happen not just in FunctionDef but also in - Lambda. Make sure this doesn't cause problems. - """ - before = """ - @dataclass - class WrapsAFunction: - func: Callable - msg_gen: Callable = lambda self: f"calling {self.func.__name__}..." - """ - after = before - self.assertCodemod39Plus(before, after) - - def test_no_quoting(self) -> None: - before = """ - def f(x): - # type: (Foo) -> Foo - pass - w = x # type: Foo - y, z = x, x # type: (Foo, Foo) - return w - - with get_context() as context: # type: Context - pass - - for loop_var in the_iterable: # type: LoopType - pass - """ - after = """ - def f(x: Foo) -> Foo: - pass - w: Foo = x - y: Foo - z: Foo - y, z = x, x - return w - - context: Context - with get_context() as context: - pass - - loop_var: LoopType - for loop_var in the_iterable: - pass - """ - self.assertCodemod39Plus(before, after, no_quote_annotations=True) + self.assertCodemod38Plus(before, after) diff --git a/libcst/codemod/commands/tests/test_convert_union_to_or.py b/libcst/codemod/commands/tests/test_convert_union_to_or.py deleted file mode 100644 index 5ba557d2..00000000 --- a/libcst/codemod/commands/tests/test_convert_union_to_or.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
-# -# pyre-strict - -from libcst.codemod import CodemodTest -from libcst.codemod.commands.convert_union_to_or import ConvertUnionToOrCommand - - -class TestConvertUnionToOrCommand(CodemodTest): - TRANSFORM = ConvertUnionToOrCommand - - def test_simple_union(self) -> None: - before = """ - from typing import Union - x: Union[int, str] - """ - after = """ - x: int | str - """ - self.assertCodemod(before, after) - - def test_nested_union(self) -> None: - before = """ - from typing import Union - x: Union[int, Union[str, float]] - """ - after = """ - x: int | str | float - """ - self.assertCodemod(before, after) - - def test_single_type_union(self) -> None: - before = """ - from typing import Union - x: Union[int] - """ - after = """ - x: int - """ - self.assertCodemod(before, after) - - def test_union_with_alias(self) -> None: - before = """ - import typing as t - x: t.Union[int, str] - """ - after = """ - import typing as t - x: int | str - """ - self.assertCodemod(before, after) - - def test_union_with_unused_import(self) -> None: - before = """ - from typing import Union, List - x: Union[int, str] - """ - after = """ - from typing import List - x: int | str - """ - self.assertCodemod(before, after) - - def test_union_no_import(self) -> None: - before = """ - x: Union[int, str] - """ - after = """ - x: Union[int, str] - """ - self.assertCodemod(before, after) - - def test_union_in_function(self) -> None: - before = """ - from typing import Union - def foo(x: Union[int, str]) -> Union[float, None]: - ... - """ - after = """ - def foo(x: int | str) -> float | None: - ... - """ - self.assertCodemod(before, after) diff --git a/libcst/codemod/commands/tests/test_fix_pyre_directives.py b/libcst/codemod/commands/tests/test_fix_pyre_directives.py index 4707073a..8871397a 100644 --- a/libcst/codemod/commands/tests/test_fix_pyre_directives.py +++ b/libcst/codemod/commands/tests/test_fix_pyre_directives.py @@ -8,15 +8,21 @@ from libcst.codemod.commands.fix_pyre_directives import FixPyreDirectivesCommand class TestFixPyreDirectivesCommand(CodemodTest): + TRANSFORM = FixPyreDirectivesCommand def test_no_need_to_fix_simple(self) -> None: """ Tests that a pyre-strict inside the module header doesn't get touched. """ - after = ( - before - ) = """ + before = """ + # pyre-strict + from typing import List + + def baz() -> List[Foo]: + pass + """ + after = """ # pyre-strict from typing import List @@ -29,9 +35,16 @@ class TestFixPyreDirectivesCommand(CodemodTest): """ Tests that a pyre-strict inside the module header doesn't get touched. """ - after = ( - before - ) = """ + before = """ + # This is some header comment. + # + # pyre-strict + from typing import List + + def baz() -> List[Foo]: + pass + """ + after = """ # This is some header comment. # # pyre-strict @@ -46,9 +59,17 @@ class TestFixPyreDirectivesCommand(CodemodTest): """ Tests that a pyre-strict inside the module header doesn't get touched. """ - after = ( - before - ) = """ + before = """ + # pyre-strict + # + # This is some header comment. + + from typing import List + + def baz() -> List[Foo]: + pass + """ + after = """ # pyre-strict # # This is some header comment. diff --git a/libcst/codemod/commands/tests/test_fix_variadic_callable.py b/libcst/codemod/commands/tests/test_fix_variadic_callable.py deleted file mode 100644 index 848f0c98..00000000 --- a/libcst/codemod/commands/tests/test_fix_variadic_callable.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. 
-# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -# pyre-strict - -from libcst.codemod import CodemodTest -from libcst.codemod.commands.fix_variadic_callable import FixVariadicCallableCommmand - - -class TestFixVariadicCallableCommmand(CodemodTest): - TRANSFORM = FixVariadicCallableCommmand - - def test_callable_typing(self) -> None: - before = """ - from typing import Callable - x: Callable[[...], int] = ... - """ - after = """ - from typing import Callable - x: Callable[..., int] = ... - """ - self.assertCodemod(before, after) - - def test_callable_typing_alias(self) -> None: - before = """ - import typing as t - x: t.Callable[[...], int] = ... - """ - after = """ - import typing as t - x: t.Callable[..., int] = ... - """ - self.assertCodemod(before, after) - - def test_callable_import_alias(self) -> None: - before = """ - from typing import Callable as C - x: C[[...], int] = ... - """ - after = """ - from typing import Callable as C - x: C[..., int] = ... - """ - self.assertCodemod(before, after) - - def test_callable_with_optional(self) -> None: - before = """ - from typing import Callable - def foo(bar: Optional[Callable[[...], int]]) -> Callable[[...], int]: - ... - """ - after = """ - from typing import Callable - def foo(bar: Optional[Callable[..., int]]) -> Callable[..., int]: - ... - """ - self.assertCodemod(before, after) - - def test_callable_with_arguments(self) -> None: - before = """ - from typing import Callable - x: Callable[[int], int] - """ - after = """ - from typing import Callable - x: Callable[[int], int] - """ - self.assertCodemod(before, after) - - def test_callable_with_variadic_arguments(self) -> None: - before = """ - from typing import Callable - x: Callable[[int, int, ...], int] - """ - after = """ - from typing import Callable - x: Callable[[int, int, ...], int] - """ - self.assertCodemod(before, after) - - def test_callable_no_arguments(self) -> None: - before = """ - from typing import Callable - x: Callable - """ - after = """ - from typing import Callable - x: Callable - """ - self.assertCodemod(before, after) diff --git a/libcst/codemod/commands/tests/test_noop.py b/libcst/codemod/commands/tests/test_noop.py index fa586a3d..12995f5c 100644 --- a/libcst/codemod/commands/tests/test_noop.py +++ b/libcst/codemod/commands/tests/test_noop.py @@ -8,6 +8,7 @@ from libcst.codemod.commands.noop import NOOPCommand class TestNOOPCodemod(CodemodTest): + TRANSFORM = NOOPCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/tests/test_remove_pyre_directive.py b/libcst/codemod/commands/tests/test_remove_pyre_directive.py index c99f88ce..74c1847c 100644 --- a/libcst/codemod/commands/tests/test_remove_pyre_directive.py +++ b/libcst/codemod/commands/tests/test_remove_pyre_directive.py @@ -11,6 +11,7 @@ from libcst.codemod.commands.remove_pyre_directive import ( class TestRemovePyreStrictCommand(CodemodTest): + TRANSFORM = RemovePyreStrictCommand def test_remove_from_file(self) -> None: @@ -96,6 +97,7 @@ class TestRemovePyreStrictCommand(CodemodTest): class TestRemovePyreUnsafeCommand(CodemodTest): + TRANSFORM = RemovePyreUnsafeCommand def test_remove_from_file(self) -> None: diff --git a/libcst/codemod/commands/tests/test_rename.py b/libcst/codemod/commands/tests/test_rename.py index 5b6e0128..e0668bfb 100644 --- a/libcst/codemod/commands/tests/test_rename.py +++ b/libcst/codemod/commands/tests/test_rename.py @@ -10,9 +10,11 @@ from libcst.codemod.commands.rename import 
RenameCommand class TestRenameCommand(CodemodTest): + TRANSFORM = RenameCommand def test_rename_name(self) -> None: + before = """ from foo import bar @@ -28,20 +30,8 @@ class TestRenameCommand(CodemodTest): self.assertCodemod(before, after, old_name="foo.bar", new_name="baz.qux") - def test_rename_to_builtin(self) -> None: - before = """ - from typing import List - x: List[int] = [] - """ - after = """ - x: list[int] = [] - """ - - self.assertCodemod( - before, after, old_name="typing.List", new_name="builtins.list" - ) - def test_rename_name_asname(self) -> None: + before = """ from foo import bar as bla @@ -83,6 +73,7 @@ class TestRenameCommand(CodemodTest): ) def test_rename_attr(self) -> None: + before = """ import a.b @@ -104,6 +95,7 @@ class TestRenameCommand(CodemodTest): ) def test_rename_attr_asname(self) -> None: + before = """ import foo as bar @@ -124,27 +116,6 @@ class TestRenameCommand(CodemodTest): new_name="baz.quux", ) - def test_rename_attr_asname_2(self) -> None: - before = """ - import foo.qux as bar - - def test() -> None: - bar.z(5) - """ - after = """ - import baz.quux - - def test() -> None: - baz.quux.z(5) - """ - - self.assertCodemod( - before, - after, - old_name="foo.qux", - new_name="baz.quux", - ) - def test_rename_module_import(self) -> None: before = """ import a.b @@ -309,38 +280,6 @@ class TestRenameCommand(CodemodTest): new_name="a.b.module_3.Class_3", ) - def test_import_same_module(self) -> None: - before = """ - import logging - logging.warn(1) - """ - after = """ - import logging - logging.warning(1) - """ - self.assertCodemod( - before, - after, - old_name="logging.warn", - new_name="logging.warning", - ) - - def test_import_same_dotted_module(self) -> None: - before = """ - import a.b - a.b.warn(1) - """ - after = """ - import a.b - a.b.warning(1) - """ - self.assertCodemod( - before, - after, - old_name="a.b.warn", - new_name="a.b.warning", - ) - def test_rename_local_variable(self) -> None: before = """ x = 5 @@ -395,28 +334,6 @@ class TestRenameCommand(CodemodTest): new_name="d.z", ) - def test_comma_import(self) -> None: - before = """ - import a, b, c - - class Foo(a.z): - bar: b.bar - baz: c.baz - """ - after = """ - import a, b, d - - class Foo(a.z): - bar: b.bar - baz: d.baz - """ - self.assertCodemod( - before, - after, - old_name="c.baz", - new_name="d.baz", - ) - def test_other_import_froms_untouched(self) -> None: before = """ from a import b, c, d @@ -440,61 +357,6 @@ class TestRenameCommand(CodemodTest): new_name="f.b", ) - def test_comma_import_from(self) -> None: - before = """ - from a import b, c, d - - class Foo(b): - bar: c.bar - baz: d.baz - """ - after = """ - from a import b, c - from f import d - - class Foo(b): - bar: c.bar - baz: d.baz - """ - self.assertCodemod( - before, - after, - old_name="a.d", - new_name="f.d", - ) - - def test_comma_import_from_parens(self) -> None: - before = """ - from a import ( - b, - c, - d, - ) - from x import (y,) - - class Foo(b): - bar: c.bar - baz: d.baz - """ - after = """ - from a import ( - b, - c, - ) - from x import (y,) - from f import d - - class Foo(b): - bar: c.bar - baz: d.baz - """ - self.assertCodemod( - before, - after, - old_name="a.d", - new_name="f.d", - ) - def test_no_removal_of_import_in_use(self) -> None: before = """ import a @@ -798,90 +660,3 @@ class TestRenameCommand(CodemodTest): bar(42) """ self.assertCodemod(before, before, old_name="baz.bar", new_name="qux.bar") - - def test_rename_single_with_colon(self) -> None: - before = """ - from a.b import qux - - 
print(qux) - """ - after = """ - from a import b - - print(b.qux) - """ - self.assertCodemod( - before, - after, - old_name="a.b.qux", - new_name="a:b.qux", - ) - - def test_import_parent_module(self) -> None: - before = """ - import a - a.b.c(a.b.c.d) - """ - after = """ - from z import c - - c(c.d) - """ - self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c") - - def test_import_parent_module_2(self) -> None: - before = """ - import a.b - a.b.c.d(a.b.c.d.x) - """ - after = """ - from z import c - - c(c.x) - """ - self.assertCodemod(before, after, old_name="a.b.c.d", new_name="z.c") - - def test_import_parent_module_3(self) -> None: - before = """ - import a - a.b.c(a.b.c.d) - """ - after = """ - import z.c - - z.c(z.c.d) - """ - self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c:") - - def test_import_parent_module_asname(self) -> None: - before = """ - import a.b as alias - alias.c(alias.c.d) - """ - after = """ - import z - z.c(z.c.d) - """ - self.assertCodemod(before, after, old_name="a.b.c", new_name="z.c") - - def test_push_down_toplevel_names(self) -> None: - before = """ - import foo - foo.baz() - """ - after = """ - import quux.foo - quux.foo.baz() - """ - self.assertCodemod(before, after, old_name="foo", new_name="quux.foo") - - def test_push_down_toplevel_names_with_asname(self) -> None: - before = """ - import foo as bar - bar.baz() - """ - after = """ - import quux.foo - quux.foo.baz() - """ - self.assertCodemod(before, after, old_name="foo", new_name="quux.foo") diff --git a/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py b/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py deleted file mode 100644 index 7a0a83c3..00000000 --- a/libcst/codemod/commands/tests/test_rename_typing_generic_aliases.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
-# -# pyre-strict - -from libcst.codemod import CodemodTest -from libcst.codemod.commands.rename_typing_generic_aliases import ( - RenameTypingGenericAliases, -) - - -class TestRenameCommand(CodemodTest): - TRANSFORM = RenameTypingGenericAliases - - def test_rename_typing_generic_alias(self) -> None: - before = """ - from typing import List, Set, Dict, FrozenSet, Tuple - x: List[int] = [] - y: Set[int] = set() - z: Dict[str, int] = {} - a: FrozenSet[str] = frozenset() - b: Tuple[int, str] = (1, "hello") - """ - after = """ - x: list[int] = [] - y: set[int] = set() - z: dict[str, int] = {} - a: frozenset[str] = frozenset() - b: tuple[int, str] = (1, "hello") - """ - self.assertCodemod(before, after) diff --git a/libcst/codemod/commands/tests/test_strip_strings_from_types.py b/libcst/codemod/commands/tests/test_strip_strings_from_types.py index 3c3893b1..64a2fd10 100644 --- a/libcst/codemod/commands/tests/test_strip_strings_from_types.py +++ b/libcst/codemod/commands/tests/test_strip_strings_from_types.py @@ -8,6 +8,7 @@ from libcst.codemod.commands.strip_strings_from_types import StripStringsCommand class TestStripStringsCodemod(CodemodTest): + TRANSFORM = StripStringsCommand def test_noop(self) -> None: diff --git a/libcst/codemod/commands/unnecessary_format_string.py b/libcst/codemod/commands/unnecessary_format_string.py index 2320af17..d3bcd1c9 100644 --- a/libcst/codemod/commands/unnecessary_format_string.py +++ b/libcst/codemod/commands/unnecessary_format_string.py @@ -9,6 +9,7 @@ from libcst.codemod import VisitorBasedCodemodCommand class UnnecessaryFormatString(VisitorBasedCodemodCommand): + DESCRIPTION: str = ( "Converts f-strings which perform no formatting to regular strings." ) diff --git a/libcst/codemod/tests/test_cli.py b/libcst/codemod/tests/test_cli.py new file mode 100644 index 00000000..9c1834e5 --- /dev/null +++ b/libcst/codemod/tests/test_cli.py @@ -0,0 +1,66 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from typing import Optional + +from libcst.codemod._cli import _calculate_module +from libcst.testing.utils import data_provider, UnitTest + + +class TestPackageCalculation(UnitTest): + @data_provider( + ( + # Providing no root should give back no module. + (None, "/some/dummy/file.py", None), + # Providing a file outside the root should give back no module. + ("/home/username/root", "/some/dummy/file.py", None), + ("/home/username/root/", "/some/dummy/file.py", None), + ("/home/username/root", "/home/username/file.py", None), + # Various files inside the root should give back valid modules. + ("/home/username/root", "/home/username/root/file.py", "file"), + ("/home/username/root/", "/home/username/root/file.py", "file"), + ( + "/home/username/root/", + "/home/username/root/some/dir/file.py", + "some.dir.file", + ), + # Various special files inside the root should give back valid modules. 
+ ( + "/home/username/root/", + "/home/username/root/some/dir/__init__.py", + "some.dir", + ), + ( + "/home/username/root/", + "/home/username/root/some/dir/__main__.py", + "some.dir", + ), + # some windows tests + ( + "c:/Program Files/", + "d:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/other/", + "c:/Program Files/some/dir/file.py", + None, + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/file.py", + "some.dir.file", + ), + ( + "c:/Program Files/", + "c:/Program Files/some/dir/__main__.py", + "some.dir", + ), + ), + ) + def test_calculate_module( + self, repo_root: Optional[str], filename: str, module: str + ) -> None: + self.assertEqual(_calculate_module(repo_root, filename), module) diff --git a/libcst/codemod/tests/test_codemod.py b/libcst/codemod/tests/test_codemod.py index ba68b94b..754f35fe 100644 --- a/libcst/codemod/tests/test_codemod.py +++ b/libcst/codemod/tests/test_codemod.py @@ -23,6 +23,7 @@ class SimpleCodemod(Codemod): class TestSkipDetection(CodemodTest): + TRANSFORM = SimpleCodemod def test_detect_skip(self) -> None: @@ -86,6 +87,7 @@ class IncrementCodemod(Codemod): class TestMultipass(CodemodTest): + TRANSFORM = IncrementCodemod def test_multi_iterations(self) -> None: diff --git a/libcst/codemod/tests/test_codemod_cli.py b/libcst/codemod/tests/test_codemod_cli.py index 9798b071..934ae667 100644 --- a/libcst/codemod/tests/test_codemod_cli.py +++ b/libcst/codemod/tests/test_codemod_cli.py @@ -5,115 +5,36 @@ # -import platform import subprocess import sys -import tempfile from pathlib import Path -from unittest import skipIf -from libcst.codemod import CodemodTest +from libcst._parser.entrypoints import is_native from libcst.testing.utils import UnitTest class TestCodemodCLI(UnitTest): - # pyre-ignore - no idea why pyre is complaining about this - @skipIf(platform.system() == "Windows", "Windows") def test_codemod_formatter_error_input(self) -> None: rlt = subprocess.run( [ - sys.executable, + "python", "-m", "libcst.tool", "codemod", "remove_unused_imports.RemoveUnusedImportsCommand", - # `ArgumentParser.parse_known_args()`'s behavior dictates that options - # need to go after instead of before the codemod command identifier. - "--python-version", - "3.6", str(Path(__file__).parent / "codemod_formatter_error_input.py.txt"), ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) - self.assertIn( - "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:", - rlt.stderr.decode("utf-8"), - ) - - def test_codemod_external(self) -> None: - # Test running the NOOP command as an "external command" - # against this very file. - output = subprocess.check_output( - [ - sys.executable, - "-m", - "libcst.tool", - "codemod", - "-x", # external module - "libcst.codemod.commands.noop.NOOPCommand", - str(Path(__file__)), - ], - encoding="utf-8", - stderr=subprocess.STDOUT, - ) - assert "Finished codemodding 1 files!" 
in output - - def test_warning_messages_several_files(self) -> None: - code = """ - def baz() -> str: - return "{}: {}".format(*baz) - """ - with tempfile.TemporaryDirectory() as tmpdir: - p = Path(tmpdir) - (p / "mod1.py").write_text(CodemodTest.make_fixture_data(code)) - (p / "mod2.py").write_text(CodemodTest.make_fixture_data(code)) - (p / "mod3.py").write_text(CodemodTest.make_fixture_data(code)) - output = subprocess.run( - [ - sys.executable, - "-m", - "libcst.tool", - "codemod", - "convert_format_to_fstring.ConvertFormatStringCommand", - str(p), - ], - encoding="utf-8", - stderr=subprocess.PIPE, - ) - # Each module will generate a warning, so we should get 3 warnings in total + version = sys.version_info + if version[0] == 3 and version[1] == 6 and not is_native(): self.assertIn( - "- 3 warnings were generated.", - output.stderr, - ) - - def test_matcher_decorators_multiprocessing(self) -> None: - file_count = 5 - code = """ - def baz(): # type: int - return 5 - """ - with tempfile.TemporaryDirectory() as tmpdir: - p = Path(tmpdir) - # Using more than chunksize=4 files to trigger multiprocessing - for i in range(file_count): - (p / f"mod{i}.py").write_text(CodemodTest.make_fixture_data(code)) - output = subprocess.run( - [ - sys.executable, - "-m", - "libcst.tool", - "codemod", - # Good candidate since it uses matcher decorators - "convert_type_comments.ConvertTypeComments", - str(p), - "--jobs", - str(file_count), - ], - encoding="utf-8", - stderr=subprocess.PIPE, + "ParserSyntaxError: Syntax Error @ 14:11.", + rlt.stderr.decode("utf-8"), ) + else: self.assertIn( - f"Transformed {file_count} files successfully.", - output.stderr, + "error: cannot format -: Cannot parse: 13:10: async with AsyncExitStack() as stack:", + rlt.stderr.decode("utf-8"), ) diff --git a/libcst/codemod/tests/test_command_helpers.py b/libcst/codemod/tests/test_command_helpers.py deleted file mode 100644 index 15c461b6..00000000 --- a/libcst/codemod/tests/test_command_helpers.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# -from typing import Union - -import libcst as cst -from libcst.codemod import CodemodTest, VisitorBasedCodemodCommand - - -class TestRemoveUnusedImportHelper(CodemodTest): - """Tests for the remove_unused_import helper method in CodemodCommand.""" - - def test_remove_unused_import_simple(self) -> None: - """ - Test that remove_unused_import helper method works correctly. - """ - - class RemoveBarImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule removal - self.remove_unused_import("bar") - - before = """ - import bar - import baz - - def foo() -> None: - pass - """ - after = """ - import baz - - def foo() -> None: - pass - """ - - self.TRANSFORM = RemoveBarImport - self.assertCodemod(before, after) - - def test_remove_unused_import_from_simple(self) -> None: - """ - Test that remove_unused_import helper method works correctly with from imports. 
- """ - - class RemoveBarFromImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule removal - self.remove_unused_import("a.b.c", "bar") - - before = """ - from a.b.c import bar, baz - - def foo() -> None: - baz() - """ - after = """ - from a.b.c import baz - - def foo() -> None: - baz() - """ - - self.TRANSFORM = RemoveBarFromImport - self.assertCodemod(before, after) - - def test_remove_unused_import_with_alias(self) -> None: - """ - Test that remove_unused_import helper method works correctly with aliased imports. - """ - - class RemoveBarAsQuxImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule removal - self.remove_unused_import("a.b.c", "bar", "qux") - - before = """ - from a.b.c import bar as qux, baz - - def foo() -> None: - baz() - """ - after = """ - from a.b.c import baz - - def foo() -> None: - baz() - """ - - self.TRANSFORM = RemoveBarAsQuxImport - self.assertCodemod(before, after) - - -class TestRemoveUnusedImportByNodeHelper(CodemodTest): - """Tests for the remove_unused_import_by_node helper method in CodemodCommand.""" - - def test_remove_unused_import_by_node_simple(self) -> None: - """ - Test that remove_unused_import_by_node helper method works correctly. - """ - - class RemoveBarCallAndImport(VisitorBasedCodemodCommand): - METADATA_DEPENDENCIES = ( - cst.metadata.QualifiedNameProvider, - cst.metadata.ScopeProvider, - ) - - def leave_SimpleStatementLine( - self, - original_node: cst.SimpleStatementLine, - updated_node: cst.SimpleStatementLine, - ) -> Union[cst.RemovalSentinel, cst.SimpleStatementLine]: - # Remove any statement that calls bar() - if cst.matchers.matches( - updated_node, - cst.matchers.SimpleStatementLine( - body=[cst.matchers.Expr(cst.matchers.Call())] - ), - ): - call = cst.ensure_type(updated_node.body[0], cst.Expr).value - if cst.matchers.matches( - call, cst.matchers.Call(func=cst.matchers.Name("bar")) - ): - # Use the helper method to remove imports referenced by this node - self.remove_unused_import_by_node(original_node) - return cst.RemoveFromParent() - return updated_node - - before = """ - from foo import bar, baz - - def fun() -> None: - bar() - baz() - """ - after = """ - from foo import baz - - def fun() -> None: - baz() - """ - - self.TRANSFORM = RemoveBarCallAndImport - self.assertCodemod(before, after) - - -class TestAddNeededImportHelper(CodemodTest): - """Tests for the add_needed_import helper method in CodemodCommand.""" - - def test_add_needed_import_simple(self) -> None: - """ - Test that add_needed_import helper method works correctly. - """ - - class AddBarImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule import addition - self.add_needed_import("bar") - - before = """ - def foo() -> None: - pass - """ - after = """ - import bar - - def foo() -> None: - pass - """ - - self.TRANSFORM = AddBarImport - self.assertCodemod(before, after) - - def test_add_needed_import_from_simple(self) -> None: - """ - Test that add_needed_import helper method works correctly with from imports. 
- """ - - class AddBarFromImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule import addition - self.add_needed_import("a.b.c", "bar") - - before = """ - def foo() -> None: - pass - """ - after = """ - from a.b.c import bar - - def foo() -> None: - pass - """ - - self.TRANSFORM = AddBarFromImport - self.assertCodemod(before, after) - - def test_add_needed_import_with_alias(self) -> None: - """ - Test that add_needed_import helper method works correctly with aliased imports. - """ - - class AddBarAsQuxImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule import addition - self.add_needed_import("a.b.c", "bar", "qux") - - before = """ - def foo() -> None: - pass - """ - after = """ - from a.b.c import bar as qux - - def foo() -> None: - pass - """ - - self.TRANSFORM = AddBarAsQuxImport - self.assertCodemod(before, after) - - def test_add_needed_import_relative(self) -> None: - """ - Test that add_needed_import helper method works correctly with relative imports. - """ - - class AddRelativeImport(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Use the helper method to schedule relative import addition - self.add_needed_import("c", "bar", relative=2) - - before = """ - def foo() -> None: - pass - """ - after = """ - from ..c import bar - - def foo() -> None: - pass - """ - - self.TRANSFORM = AddRelativeImport - self.assertCodemod(before, after) - - -class TestCombinedHelpers(CodemodTest): - """Tests for combining add_needed_import and remove_unused_import helper methods.""" - - def test_add_and_remove_imports(self) -> None: - """ - Test that both helper methods work correctly when used together. - """ - - class ReplaceBarWithBaz(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Add new import and remove old one - self.add_needed_import("new_module", "baz") - self.remove_unused_import("old_module", "bar") - - before = """ - from other_module import qux - from old_module import bar - - def foo() -> None: - pass - """ - after = """ - from other_module import qux - from new_module import baz - - def foo() -> None: - pass - """ - - self.TRANSFORM = ReplaceBarWithBaz - self.assertCodemod(before, after) - - def test_add_and_remove_same_import(self) -> None: - """ - Test that both helper methods work correctly when used together. 
- """ - - class AddAndRemoveBar(VisitorBasedCodemodCommand): - def visit_Module(self, node: cst.Module) -> None: - # Add new import and remove old one - self.add_needed_import("hello_module", "bar") - self.remove_unused_import("hello_module", "bar") - - self.TRANSFORM = AddAndRemoveBar - - before = """ - from other_module import baz - - def foo() -> None: - pass - """ - # Should remain unchanged - self.assertCodemod(before, before) - - before = """ - from other_module import baz - from hello_module import bar - - def foo() -> None: - bar.func() - """ - self.assertCodemod(before, before) - - before = """ - from other_module import baz - from hello_module import bar - - def foo() -> None: - pass - """ - - after = """ - from other_module import baz - - def foo() -> None: - pass - """ - self.assertCodemod(before, after) diff --git a/libcst/codemod/tests/test_metadata.py b/libcst/codemod/tests/test_metadata.py index eff3a228..1a99b3dd 100644 --- a/libcst/codemod/tests/test_metadata.py +++ b/libcst/codemod/tests/test_metadata.py @@ -13,6 +13,7 @@ from libcst.testing.utils import UnitTest class TestingCollector(ContextAwareVisitor): + METADATA_DEPENDENCIES = (PositionProvider,) def visit_Pass(self, node: cst.Pass) -> None: @@ -21,6 +22,7 @@ class TestingCollector(ContextAwareVisitor): class TestingTransform(ContextAwareTransformer): + METADATA_DEPENDENCIES = (PositionProvider,) def visit_FunctionDef(self, node: cst.FunctionDef) -> None: diff --git a/libcst/codemod/visitors/__init__.py b/libcst/codemod/visitors/__init__.py index 632d6fa6..1cbbd2c8 100644 --- a/libcst/codemod/visitors/__init__.py +++ b/libcst/codemod/visitors/__init__.py @@ -7,7 +7,6 @@ from libcst.codemod.visitors._add_imports import AddImportsVisitor from libcst.codemod.visitors._apply_type_annotations import ApplyTypeAnnotationsVisitor from libcst.codemod.visitors._gather_comments import GatherCommentsVisitor from libcst.codemod.visitors._gather_exports import GatherExportsVisitor -from libcst.codemod.visitors._gather_global_names import GatherGlobalNamesVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.codemod.visitors._gather_string_annotation_names import ( GatherNamesFromStringAnnotationsVisitor, @@ -21,7 +20,6 @@ __all__ = [ "ApplyTypeAnnotationsVisitor", "GatherCommentsVisitor", "GatherExportsVisitor", - "GatherGlobalNamesVisitor", "GatherImportsVisitor", "GatherNamesFromStringAnnotationsVisitor", "GatherUnusedImportsVisitor", diff --git a/libcst/codemod/visitors/_add_imports.py b/libcst/codemod/visitors/_add_imports.py index eeab43ae..64131dd6 100644 --- a/libcst/codemod/visitors/_add_imports.py +++ b/libcst/codemod/visitors/_add_imports.py @@ -7,52 +7,12 @@ from collections import defaultdict from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst -from libcst import CSTLogicError, matchers as m, parse_statement -from libcst._nodes.statement import Import, ImportFrom, SimpleStatementLine +from libcst import matchers as m, parse_statement from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer -from libcst.codemod.visitors._gather_imports import _GatherImportsMixin +from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.codemod.visitors._imports import ImportItem -from libcst.helpers import get_absolute_module_from_package_for_import -from libcst.helpers.common import ensure_type - - -class _GatherTopImportsBeforeStatements(_GatherImportsMixin): - """ - Works similarly 
to GatherImportsVisitor, but only considers imports - declared before any other statements of the module with the exception - of docstrings and __strict__ flag. - """ - - def __init__(self, context: CodemodContext) -> None: - super().__init__(context) - # Track all of the imports found in this transform - self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] - - def leave_Module(self, original_node: libcst.Module) -> None: - start = 1 if _skip_first(original_node) else 0 - for stmt in original_node.body[start:]: - if m.matches( - stmt, - m.SimpleStatementLine(body=[m.ImportFrom() | m.Import()]), - ): - stmt = ensure_type(stmt, SimpleStatementLine) - # Workaround for python 3.8 and 3.9, won't accept Union for isinstance - if m.matches(stmt.body[0], m.ImportFrom()): - imp = ensure_type(stmt.body[0], ImportFrom) - self.all_imports.append(imp) - if m.matches(stmt.body[0], m.Import()): - imp = ensure_type(stmt.body[0], Import) - self.all_imports.append(imp) - else: - break - for imp in self.all_imports: - if m.matches(imp, m.Import()): - imp = ensure_type(imp, Import) - self._handle_Import(imp) - else: - imp = ensure_type(imp, ImportFrom) - self._handle_ImportFrom(imp) +from libcst.helpers import get_absolute_module_for_import class AddImportsVisitor(ContextAwareTransformer): @@ -107,7 +67,7 @@ class AddImportsVisitor(ContextAwareTransformer): ) -> List[ImportItem]: imports = context.scratch.get(AddImportsVisitor.CONTEXT_KEY, []) if not isinstance(imports, list): - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") return imports @staticmethod @@ -136,7 +96,7 @@ class AddImportsVisitor(ContextAwareTransformer): """ if module == "__future__" and obj is None: - raise ValueError("Cannot import __future__ directly!") + raise Exception("Cannot import __future__ directly!") imports = AddImportsVisitor._get_imports_from_context(context) imports.append(ImportItem(module, obj, asname, relative)) context.scratch[AddImportsVisitor.CONTEXT_KEY] = imports @@ -157,12 +117,12 @@ class AddImportsVisitor(ContextAwareTransformer): # Verify that the imports are valid for imp in imps: if imp.module == "__future__" and imp.obj_name is None: - raise ValueError("Cannot import __future__ directly!") + raise Exception("Cannot import __future__ directly!") if imp.module == "__future__" and imp.alias is not None: - raise ValueError("Cannot import __future__ objects with aliases!") + raise Exception("Cannot import __future__ objects with aliases!") # Resolve relative imports if we have a module name - imps = [imp.resolve_relative(self.context.full_package_name) for imp in imps] + imps = [imp.resolve_relative(self.context.full_module_name) for imp in imps] # List of modules we need to ensure are imported self.module_imports: Set[str] = { @@ -209,12 +169,12 @@ class AddImportsVisitor(ContextAwareTransformer): for module in sorted(from_imports_aliases) } - # Track the list of imports found at the top of the file + # Track the list of imports found in the file self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] def visit_Module(self, node: libcst.Module) -> None: - # Do a preliminary pass to gather the imports we already have at the top - gatherer = _GatherTopImportsBeforeStatements(self.context) + # Do a preliminary pass to gather the imports we already have + gatherer = GatherImportsVisitor(self.context) node.visit(gatherer) self.all_imports = gatherer.all_imports @@ -223,7 +183,7 @@ class AddImportsVisitor(ContextAwareTransformer): if module in self.module_aliases and 
self.module_aliases[module] == alias: del self.module_aliases[module] for module, aliases in gatherer.alias_mapping.items(): - for obj, alias in aliases: + for (obj, alias) in aliases: if ( module in self.alias_mapping and (obj, alias) in self.alias_mapping[module] @@ -253,13 +213,9 @@ class AddImportsVisitor(ContextAwareTransformer): # There's nothing to do here! return updated_node - # Ensure this is one of the imports at the top - if original_node not in self.all_imports: - return updated_node - # Get the module we're importing as a string, see if we have work to do. - module = get_absolute_module_from_package_for_import( - self.context.full_package_name, updated_node + module = get_absolute_module_for_import( + self.context.full_module_name, updated_node ) if ( module is None @@ -304,26 +260,39 @@ class AddImportsVisitor(ContextAwareTransformer): statement_before_import_location = 0 import_add_location = 0 + # never insert an import before initial __strict__ flag + if m.matches( + orig_module, + m.Module( + body=[ + m.SimpleStatementLine( + body=[ + m.Assign( + targets=[m.AssignTarget(target=m.Name("__strict__"))] + ) + ] + ), + m.ZeroOrMore(), + ] + ), + ): + statement_before_import_location = import_add_location = 1 + # This works under the principle that while we might modify node contents, # we have yet to modify the number of statements. So we can match on the # original tree but break up the statements of the modified tree. If we # change this assumption in this visitor, we will have to change this code. - - # Finds the location to add imports. It is the end of the first import block that occurs before any other statement (save for docstrings) - - # Never insert an import before initial __strict__ flag or docstring - if _skip_first(orig_module): - statement_before_import_location = import_add_location = 1 - - for i, statement in enumerate( - orig_module.body[statement_before_import_location:] - ): + for i, statement in enumerate(orig_module.body): if m.matches( - statement, m.SimpleStatementLine(body=[m.ImportFrom() | m.Import()]) + statement, m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]) ): - import_add_location = i + statement_before_import_location + 1 - else: - break + statement_before_import_location = import_add_location = 1 + elif isinstance(statement, libcst.SimpleStatementLine): + for possible_import in statement.body: + for last_import in self.all_imports: + if possible_import is last_import: + import_add_location = i + 1 + break return ( list(updated_module.body[:statement_before_import_location]), @@ -445,28 +414,3 @@ class AddImportsVisitor(ContextAwareTransformer): *statements_after_imports, ) ) - - -def _skip_first(orig_module: libcst.Module) -> bool: - # Is there a __strict__ flag or docstring at the top? - if m.matches( - orig_module, - m.Module( - body=[ - m.SimpleStatementLine( - body=[ - m.Assign(targets=[m.AssignTarget(target=m.Name("__strict__"))]) - ] - ), - m.ZeroOrMore(), - ] - ) - | m.Module( - body=[ - m.SimpleStatementLine(body=[m.Expr(value=m.SimpleString())]), - m.ZeroOrMore(), - ] - ), - ): - return True - return False diff --git a/libcst/codemod/visitors/_apply_type_annotations.py b/libcst/codemod/visitors/_apply_type_annotations.py index 59347420..43f5ed03 100644 --- a/libcst/codemod/visitors/_apply_type_annotations.py +++ b/libcst/codemod/visitors/_apply_type_annotations.py @@ -1,19 +1,16 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) 2016-present, Meta Platforms, Inc. 
# # This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. +# LICENSE file in the root directory of this source tree +# -from collections import defaultdict -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Dict, List, Optional, Sequence, Set, Tuple, Union import libcst as cst -import libcst.matchers as m - from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer from libcst.codemod.visitors._add_imports import AddImportsVisitor -from libcst.codemod.visitors._gather_global_names import GatherGlobalNamesVisitor from libcst.codemod.visitors._gather_imports import GatherImportsVisitor from libcst.codemod.visitors._imports import ImportItem from libcst.helpers import get_full_name_for_node @@ -31,44 +28,7 @@ StarParamType = Union[ ] -def _module_and_target(qualified_name: str) -> Tuple[str, str]: - relative_prefix = "" - while qualified_name.startswith("."): - relative_prefix += "." - qualified_name = qualified_name[1:] - split = qualified_name.rsplit(".", 1) - if len(split) == 1: - qualifier, target = "", split[0] - else: - qualifier, target = split - return (relative_prefix + qualifier, target) - - -def _get_unique_qualified_name( - visitor: m.MatcherDecoratableVisitor, node: cst.CSTNode -) -> str: - name = None - names = [q.name for q in visitor.get_metadata(QualifiedNameProvider, node)] - if len(names) == 0: - # we hit this branch if the stub is directly using a fully - # qualified name, which is not technically valid python but is - # convenient to allow. - name = get_full_name_for_node(node) - elif len(names) == 1 and isinstance(names[0], str): - name = names[0] - if name is None: - start = visitor.get_metadata(PositionProvider, node).start - raise ValueError( - "Could not resolve a unique qualified name for type " - + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " - + f"Candidate names were: {names!r}" - ) - return name - - -def _get_import_alias_names( - import_aliases: Sequence[cst.ImportAlias], -) -> Set[str]: +def _get_import_alias_names(import_aliases: Sequence[cst.ImportAlias]) -> Set[str]: import_names = set() for imported_name in import_aliases: asname = imported_name.asname @@ -79,17 +39,7 @@ def _get_import_alias_names( return import_names -def _get_imported_names( - imports: Sequence[Union[cst.Import, cst.ImportFrom]], -) -> Set[str]: - """ - Given a series of import statements (both Import and ImportFrom), - determine all of the names that have been imported into the current - scope. 
For example: - - ``import foo.bar as bar, foo.baz`` produces ``{'bar', 'foo.baz'}`` - - ``from foo import (Bar, Baz as B)`` produces ``{'Bar', 'B'}`` - - ``from foo import *`` produces ``set()` because we cannot resolve names - """ +def _get_import_names(imports: Sequence[Union[cst.Import, cst.ImportFrom]]) -> Set[str]: import_names = set() for _import in imports: if isinstance(_import, cst.Import): @@ -101,37 +51,12 @@ def _get_imported_names( return import_names -def _is_non_sentinel( - x: Union[None, cst.CSTNode, cst.MaybeSentinel], -) -> bool: +def _is_set(x: Union[None, cst.CSTNode, cst.MaybeSentinel]) -> bool: return x is not None and x != cst.MaybeSentinel.DEFAULT -def _get_string_value( - node: cst.SimpleString, -) -> str: - s = node.value - c = s[-1] - return s[s.index(c) : -1] - - -def _find_generic_base( - node: cst.ClassDef, -) -> Optional[cst.Arg]: - for b in node.bases: - if m.matches(b.value, m.Subscript(value=m.Name("Generic"))): - return b - - @dataclass(frozen=True) class FunctionKey: - """ - Class representing a funciton name and signature. - - This exists to ensure we do not attempt to apply stubs to functions whose - definition is incompatible. - """ - name: str pos: int kwonly: str @@ -140,24 +65,13 @@ class FunctionKey: star_kwarg: bool @classmethod - def make( - cls, - name: str, - params: cst.Parameters, - ) -> "FunctionKey": + def make(cls, name: str, params: cst.Parameters) -> "FunctionKey": pos = len(params.params) kwonly = ",".join(sorted(x.name.value for x in params.kwonly_params)) posonly = len(params.posonly_params) - star_arg = _is_non_sentinel(params.star_arg) - star_kwarg = _is_non_sentinel(params.star_kwarg) - return cls( - name, - pos, - kwonly, - posonly, - star_arg, - star_kwarg, - ) + star_arg = _is_set(params.star_arg) + star_kwarg = _is_set(params.star_kwarg) + return cls(name, pos, kwonly, posonly, star_arg, star_kwarg) @dataclass(frozen=True) @@ -166,149 +80,7 @@ class FunctionAnnotation: returns: Optional[cst.Annotation] -@dataclass -class Annotations: - """ - Represents all of the annotation information we might add to - a class: - - All data is keyed on the qualified name relative to the module root - - The ``functions`` field also keys on the signature so that we - do not apply stub types where the signature is incompatible. - - The idea is that - - ``functions`` contains all function and method type - information from the stub, and the qualifier for a method includes - the containing class names (e.g. "Cat.meow") - - ``attributes`` similarly contains all globals - and class-level attribute type information. - - The ``class_definitions`` field contains all of the classes - defined in the stub. Most of these classes will be ignored in - downstream logic (it is *not* used to annotate attributes or - method), but there are some cases like TypedDict where a - typing-only class needs to be injected. - - The field ``typevars`` contains the assign statement for all - type variables in the stub, and ``names`` tracks - all of the names used in annotations; together these fields - tell us which typevars should be included in the codemod - (all typevars that appear in annotations.) - """ - - # TODO: consider simplifying this in a few ways: - # - We could probably just inject all typevars, used or not. - # It doesn't seem to me that our codemod needs to act like - # a linter checking for unused names. 
- # - We could probably decide which classes are typing-only - # in the visitor rather than the codemod, which would make - # it easier to reason locally about (and document) how the - # class_definitions field works. - - functions: Dict[FunctionKey, FunctionAnnotation] - attributes: Dict[str, cst.Annotation] - class_definitions: Dict[str, cst.ClassDef] - typevars: Dict[str, cst.Assign] - names: Set[str] - - @classmethod - def empty(cls) -> "Annotations": - return Annotations({}, {}, {}, {}, set()) - - def update(self, other: "Annotations") -> None: - self.functions.update(other.functions) - self.attributes.update(other.attributes) - self.class_definitions.update(other.class_definitions) - self.typevars.update(other.typevars) - self.names.update(other.names) - - def finish(self) -> None: - self.typevars = {k: v for k, v in self.typevars.items() if k in self.names} - - -@dataclass(frozen=True) -class ImportedSymbol: - """Import of foo.Bar, where both foo and Bar are potentially aliases.""" - - module_name: str - module_alias: Optional[str] = None - target_name: Optional[str] = None - target_alias: Optional[str] = None - - @property - def symbol(self) -> Optional[str]: - return self.target_alias or self.target_name - - @property - def module_symbol(self) -> str: - return self.module_alias or self.module_name - - -class ImportedSymbolCollector(m.MatcherDecoratableVisitor): - """ - Collect imported symbols from a stub module. - """ - - METADATA_DEPENDENCIES = ( - PositionProvider, - QualifiedNameProvider, - ) - - def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: - super().__init__() - self.existing_imports: Set[str] = existing_imports - self.imported_symbols: Dict[str, Set[ImportedSymbol]] = defaultdict(set) - self.in_annotation: bool = False - - def visit_Annotation(self, node: cst.Annotation) -> None: - self.in_annotation = True - - def leave_Annotation(self, original_node: cst.Annotation) -> None: - self.in_annotation = False - - def visit_ClassDef(self, node: cst.ClassDef) -> None: - for base in node.bases: - value = base.value - if isinstance(value, NAME_OR_ATTRIBUTE): - self._handle_NameOrAttribute(value) - - def visit_Name(self, node: cst.Name) -> None: - if self.in_annotation: - self._handle_NameOrAttribute(node) - - def visit_Attribute(self, node: cst.Attribute) -> None: - if self.in_annotation: - self._handle_NameOrAttribute(node) - - def visit_Subscript(self, node: cst.Subscript) -> bool: - if isinstance(node.value, NAME_OR_ATTRIBUTE): - return True - return _get_unique_qualified_name(self, node) not in ("Type", "typing.Type") - - def _handle_NameOrAttribute( - self, - node: NameOrAttribute, - ) -> None: - # Adds the qualified name to the list of imported symbols - obj = sym = None # keep pyre happy - if isinstance(node, cst.Name): - obj = None - sym = node.value - elif isinstance(node, cst.Attribute): - obj = node.value.value # pyre-ignore[16] - sym = node.attr.value - qualified_name = _get_unique_qualified_name(self, node) - module, target = _module_and_target(qualified_name) - if module in ("", "builtins"): - return - elif qualified_name not in self.existing_imports: - mod = ImportedSymbol( - module_name=module, - module_alias=obj if obj != module else None, - target_name=target, - target_alias=sym if sym != target else None, - ) - self.imported_symbols[sym].add(mod) - - -class TypeCollector(m.MatcherDecoratableVisitor): +class TypeCollector(cst.CSTVisitor): """ Collect type annotations from a stub module. 
""" @@ -318,44 +90,25 @@ class TypeCollector(m.MatcherDecoratableVisitor): QualifiedNameProvider, ) - annotations: Annotations - - def __init__( - self, - existing_imports: Set[str], - module_imports: Dict[str, ImportItem], - context: CodemodContext, - ) -> None: - super().__init__() - self.context = context - # Existing imports, determined by looking at the target module. - # Used to help us determine when a type in a stub will require new imports. - # - # The contents of this are fully-qualified names of types in scope - # as well as module names, although downstream we effectively ignore - # the module names as of the current implementation. - self.existing_imports: Set[str] = existing_imports - # Module imports, gathered by prescanning the stub file to determine - # which modules need to be imported directly to qualify their symbols. - self.module_imports: Dict[str, ImportItem] = module_imports - # Fields that help us track temporary state as we recurse + def __init__(self, existing_imports: Set[str], context: CodemodContext) -> None: + # Qualifier for storing the canonical name of the current function. self.qualifier: List[str] = [] - self.current_assign: Optional[cst.Assign] = None # used to collect typevars # Store the annotations. - self.annotations = Annotations.empty() + self.function_annotations: Dict[FunctionKey, FunctionAnnotation] = {} + self.attribute_annotations: Dict[str, cst.Annotation] = {} + self.existing_imports: Set[str] = existing_imports + self.class_definitions: Dict[str, cst.ClassDef] = {} + self.context = context - def visit_ClassDef( - self, - node: cst.ClassDef, - ) -> None: + def visit_ClassDef(self, node: cst.ClassDef) -> None: self.qualifier.append(node.name.value) new_bases = [] for base in node.bases: value = base.value if isinstance(value, NAME_OR_ATTRIBUTE): - new_value = value.visit(_TypeCollectorDequalifier(self)) + new_value = self._handle_NameOrAttribute(value) elif isinstance(value, cst.Subscript): - new_value = value.visit(_TypeCollectorDequalifier(self)) + new_value = self._handle_Subscript(value) else: start = self.get_metadata(PositionProvider, node).start raise ValueError( @@ -365,100 +118,69 @@ class TypeCollector(m.MatcherDecoratableVisitor): ) new_bases.append(base.with_changes(value=new_value)) - self.annotations.class_definitions[node.name.value] = node.with_changes( - bases=new_bases - ) + self.class_definitions[node.name.value] = node.with_changes(bases=new_bases) - def leave_ClassDef( - self, - original_node: cst.ClassDef, - ) -> None: + def leave_ClassDef(self, original_node: cst.ClassDef) -> None: self.qualifier.pop() - def visit_FunctionDef( - self, - node: cst.FunctionDef, - ) -> bool: + def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: self.qualifier.append(node.name.value) returns = node.returns return_annotation = ( - returns.visit(_TypeCollectorDequalifier(self)) - if returns is not None - else None - ) - assert return_annotation is None or isinstance( - return_annotation, cst.Annotation + self._handle_Annotation(annotation=returns) if returns is not None else None ) parameter_annotations = self._handle_Parameters(node.params) name = ".".join(self.qualifier) key = FunctionKey.make(name, node.params) - self.annotations.functions[key] = FunctionAnnotation( + self.function_annotations[key] = FunctionAnnotation( parameters=parameter_annotations, returns=return_annotation ) # pyi files don't support inner functions, return False to stop the traversal. 
return False - def leave_FunctionDef( - self, - original_node: cst.FunctionDef, - ) -> None: + def leave_FunctionDef(self, original_node: cst.FunctionDef) -> None: self.qualifier.pop() - def visit_AnnAssign( - self, - node: cst.AnnAssign, - ) -> bool: + def visit_AnnAssign(self, node: cst.AnnAssign) -> bool: name = get_full_name_for_node(node.target) if name is not None: self.qualifier.append(name) - annotation_value = node.annotation.visit(_TypeCollectorDequalifier(self)) - assert isinstance(annotation_value, cst.Annotation) - self.annotations.attributes[".".join(self.qualifier)] = annotation_value + annotation_value = self._handle_Annotation(annotation=node.annotation) + self.attribute_annotations[".".join(self.qualifier)] = annotation_value return True - def leave_AnnAssign( - self, - original_node: cst.AnnAssign, - ) -> None: + def leave_AnnAssign(self, original_node: cst.AnnAssign) -> None: self.qualifier.pop() - def visit_Assign( - self, - node: cst.Assign, - ) -> None: - self.current_assign = node + def _get_unique_qualified_name(self, node: cst.CSTNode) -> str: + name = None + names = [q.name for q in self.get_metadata(QualifiedNameProvider, node)] + if len(names) == 0: + # we hit this branch if the stub is directly using a fully + # qualified name, which is not technically valid python but is + # convenient to allow. + name = get_full_name_for_node(node) + elif len(names) == 1 and isinstance(names[0], str): + name = names[0] + if name is None: + start = self.get_metadata(PositionProvider, node).start + raise ValueError( + "Could not resolve a unique qualified name for type " + + f"{get_full_name_for_node(node)} at {start.line}:{start.column}. " + + f"Candidate names were: {names!r}" + ) + return name - def leave_Assign( + def _get_qualified_name_and_dequalified_node( self, - original_node: cst.Assign, - ) -> None: - self.current_assign = None + node: Union[cst.Name, cst.Attribute], + ) -> Tuple[str, Union[cst.Name, cst.Attribute]]: + qualified_name = self._get_unique_qualified_name(node) + dequalified_node = node.attr if isinstance(node, cst.Attribute) else node + return qualified_name, dequalified_node - @m.call_if_inside(m.Assign()) - @m.visit(m.Call(func=m.Name("TypeVar"))) - def record_typevar( - self, - node: cst.Call, - ) -> None: - # pyre-ignore current_assign is never None here - name = get_full_name_for_node(self.current_assign.targets[0].target) - if name is not None: - # pyre-ignore current_assign is never None here - self.annotations.typevars[name] = self.current_assign - self._handle_qualification_and_should_qualify("typing.TypeVar") - self.current_assign = None - - def leave_Module( - self, - original_node: cst.Module, - ) -> None: - self.annotations.finish() - - def _module_and_target( - self, - qualified_name: str, - ) -> Tuple[str, str]: + def _module_and_target(self, qualified_name: str) -> Tuple[str, str]: relative_prefix = "" while qualified_name.startswith("."): relative_prefix += "." 
@@ -470,11 +192,9 @@ class TypeCollector(m.MatcherDecoratableVisitor): qualifier, target = split return (relative_prefix + qualifier, target) - def _handle_qualification_and_should_qualify( - self, qualified_name: str, node: Optional[cst.CSTNode] = None - ) -> bool: + def _handle_qualification_and_should_qualify(self, qualified_name: str) -> bool: """ - Based on a qualified name and the existing module imports, record that + Basd on a qualified name and the existing module imports, record that we need to add an import if necessary and return whether or not we should use the qualified name due to a preexisting import. """ @@ -482,47 +202,99 @@ class TypeCollector(m.MatcherDecoratableVisitor): if module in ("", "builtins"): return False elif qualified_name not in self.existing_imports: - if module in self.existing_imports: - return True - elif module in self.module_imports: - m = self.module_imports[module] - if m.obj_name is None: - asname = m.alias - else: - asname = None - AddImportsVisitor.add_needed_import( - self.context, m.module_name, asname=asname - ) + if module == "builtins": + return False + elif module in self.existing_imports: return True else: - if node and isinstance(node, cst.Name) and node.value != target: - asname = node.value - else: - asname = None - AddImportsVisitor.add_needed_import( - self.context, - module, - target, - asname=asname, - ) + AddImportsVisitor.add_needed_import(self.context, module, target) return False return False - # Handler functions + # Handler functions. + # + # Each of these does one of two things, possibly recursively, over some + # valid CST node for a static type: + # - process the qualified name and ensure we will add necessary imports + # - dequalify the node - def _handle_Parameters( + def _handle_NameOrAttribute( self, - parameters: cst.Parameters, - ) -> cst.Parameters: - def update_annotations( - parameters: Sequence[cst.Param], - ) -> List[cst.Param]: + node: NameOrAttribute, + ) -> Union[cst.Name, cst.Attribute]: + ( + qualified_name, + dequalified_node, + ) = self._get_qualified_name_and_dequalified_node(node) + should_qualify = self._handle_qualification_and_should_qualify(qualified_name) + if should_qualify: + return node + else: + return dequalified_node + + def _handle_Index(self, slice: cst.Index) -> cst.Index: + value = slice.value + if isinstance(value, cst.Subscript): + return slice.with_changes(value=self._handle_Subscript(value)) + elif isinstance(value, cst.Attribute): + return slice.with_changes(value=self._handle_NameOrAttribute(value)) + else: + return slice + + def _handle_Subscript(self, node: cst.Subscript) -> cst.Subscript: + value = node.value + if isinstance(value, NAME_OR_ATTRIBUTE): + new_node = node.with_changes(value=self._handle_NameOrAttribute(value)) + else: + raise ValueError("Expected any indexed type to have") + if self._get_unique_qualified_name(node) in ("Type", "typing.Type"): + # Note: we are intentionally not handling qualification of + # anything inside `Type` because it's common to have nested + # classes, which we cannot currently distinguish from classes + # coming from other modules, appear here. 
+ return new_node + slice = node.slice + if isinstance(slice, tuple): + new_slice = [] + for item in slice: + value = item.slice.value + if isinstance(value, NAME_OR_ATTRIBUTE): + name = self._handle_NameOrAttribute(item.slice.value) + new_index = item.slice.with_changes(value=name) + new_slice.append(item.with_changes(slice=new_index)) + else: + if isinstance(item.slice, cst.Index): + new_index = item.slice.with_changes( + value=self._handle_Index(item.slice) + ) + item = item.with_changes(slice=new_index) + new_slice.append(item) + return new_node.with_changes(slice=tuple(new_slice)) + elif isinstance(slice, cst.Index): + new_slice = self._handle_Index(slice) + return new_node.with_changes(slice=new_slice) + else: + return new_node + + def _handle_Annotation(self, annotation: cst.Annotation) -> cst.Annotation: + node = annotation.annotation + if isinstance(node, cst.SimpleString): + return annotation + elif isinstance(node, cst.Subscript): + return cst.Annotation(annotation=self._handle_Subscript(node)) + elif isinstance(node, NAME_OR_ATTRIBUTE): + return cst.Annotation(annotation=self._handle_NameOrAttribute(node)) + else: + raise ValueError(f"Unexpected annotation node: {node}") + + def _handle_Parameters(self, parameters: cst.Parameters) -> cst.Parameters: + def update_annotations(parameters: Sequence[cst.Param]) -> List[cst.Param]: updated_parameters = [] for parameter in list(parameters): annotation = parameter.annotation if annotation is not None: parameter = parameter.with_changes( - annotation=annotation.visit(_TypeCollectorDequalifier(self)) + annotation=self._handle_Annotation(annotation=annotation) ) updated_parameters.append(parameter) return updated_parameters @@ -530,71 +302,13 @@ class TypeCollector(m.MatcherDecoratableVisitor): return parameters.with_changes(params=update_annotations(parameters.params)) -class _TypeCollectorDequalifier(cst.CSTTransformer): - def __init__(self, type_collector: "TypeCollector") -> None: - self.type_collector = type_collector - - def leave_Name( - self, original_node: cst.Name, updated_node: cst.Name - ) -> NameOrAttribute: - qualified_name = _get_unique_qualified_name(self.type_collector, original_node) - should_qualify = self.type_collector._handle_qualification_and_should_qualify( - qualified_name, original_node - ) - self.type_collector.annotations.names.add(qualified_name) - if should_qualify: - parts = qualified_name.split(".") - qualified_node = cst.Name(parts[0]) - for p in parts[1:]: - qualified_node = cst.Attribute(qualified_node, cst.Name(p)) - return qualified_node - else: - return original_node - - def visit_Attribute(self, node: cst.Attribute) -> bool: - return False - - def leave_Attribute( - self, original_node: cst.Attribute, updated_node: cst.Attribute - ) -> cst.BaseExpression: - qualified_name = _get_unique_qualified_name(self.type_collector, original_node) - should_qualify = self.type_collector._handle_qualification_and_should_qualify( - qualified_name, original_node - ) - self.type_collector.annotations.names.add(qualified_name) - if should_qualify: - return original_node - else: - return original_node.attr - - def leave_Index( - self, original_node: cst.Index, updated_node: cst.Index - ) -> cst.Index: - if isinstance(original_node.value, cst.SimpleString): - self.type_collector.annotations.names.add( - _get_string_value(original_node.value) - ) - return updated_node - - def visit_Subscript(self, node: cst.Subscript) -> bool: - return _get_unique_qualified_name(self.type_collector, node) not in ( - "Type", - 
"typing.Type", - ) - - def leave_Subscript( - self, original_node: cst.Subscript, updated_node: cst.Subscript - ) -> cst.Subscript: - if _get_unique_qualified_name(self.type_collector, original_node) in ( - "Type", - "typing.Type", - ): - # Note: we are intentionally not handling qualification of - # anything inside `Type` because it's common to have nested - # classes, which we cannot currently distinguish from classes - # coming from other modules, appear here. - return original_node.with_changes(value=original_node.value.visit(self)) - return updated_node +@dataclass(frozen=True) +class Annotations: + function_annotations: Dict[FunctionKey, FunctionAnnotation] = field( + default_factory=dict + ) + attribute_annotations: Dict[str, cst.Annotation] = field(default_factory=dict) + class_definitions: Dict[str, cst.ClassDef] = field(default_factory=dict) @dataclass @@ -604,7 +318,6 @@ class AnnotationCounts: parameter_annotations: int = 0 return_annotations: int = 0 classes_added: int = 0 - typevars_and_generics_added: int = 0 def any_changes_applied(self) -> bool: return ( @@ -613,7 +326,6 @@ class AnnotationCounts: + self.parameter_annotations + self.return_annotations + self.classes_added - + self.typevars_and_generics_added ) > 0 @@ -625,12 +337,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): This is one of the transforms that is available automatically to you when running a codemod. To use it in this manner, import - :class:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor` and then call - the static - :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.store_stub_in_context` - method, giving it the current context (found as ``self.context`` for all - subclasses of :class:`~libcst.codemod.Codemod`), the stub module from which - you wish to add annotations. + :class:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor` and then call the static + :meth:`~libcst.codemod.visitors.ApplyTypeAnnotationsVisitor.store_stub_in_context` method, + giving it the current context (found as ``self.context`` for all subclasses of + :class:`~libcst.codemod.Codemod`), the stub module from which you wish to add annotations. For example, you can store the type annotation ``int`` for ``x`` using:: @@ -647,8 +357,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): x: int = 1 - If the function or attribute already has a type annotation, it will not be - overwritten. + If the function or attribute already has a type annotation, it will not be overwritten. To overwrite existing annotations when applying annotations from a stub, use the keyword argument ``overwrite_existing_annotations=True`` when @@ -665,13 +374,12 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations: bool = False, strict_posargs_matching: bool = True, strict_annotation_matching: bool = False, - always_qualify_annotations: bool = False, ) -> None: super().__init__(context) # Qualifier for storing the canonical name of the current function. 
self.qualifier: List[str] = [] self.annotations: Annotations = ( - Annotations.empty() if annotations is None else annotations + Annotations() if annotations is None else annotations ) self.toplevel_annotations: Dict[str, cst.Annotation] = {} self.visited_classes: Set[str] = set() @@ -679,7 +387,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.use_future_annotations = use_future_annotations self.strict_posargs_matching = strict_posargs_matching self.strict_annotation_matching = strict_annotation_matching - self.always_qualify_annotations = always_qualify_annotations # We use this to determine the end of the import block so that we can # insert top-level annotations. @@ -690,19 +397,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # only made changes to the imports. self.annotation_counts: AnnotationCounts = AnnotationCounts() - # We use this to collect typevars, to avoid importing existing ones from the pyi file - self.current_assign: Optional[cst.Assign] = None - self.typevars: Dict[str, cst.Assign] = {} - - # Global variables and classes defined on the toplevel of the target module. - # Used to help determine which names we need to check are in scope, and add - # quotations to avoid undefined forward references in type annotations. - self.global_names: Set[str] = set() - - # We use this to avoid annotating multiple assignments to the same - # symbol in a given scope - self.already_annotated: Set[str] = set() - @staticmethod def store_stub_in_context( context: CodemodContext, @@ -711,7 +405,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations: bool = False, strict_posargs_matching: bool = True, strict_annotation_matching: bool = False, - always_qualify_annotations: bool = False, ) -> None: """ Store a stub module in the :class:`~libcst.codemod.CodemodContext` so @@ -730,29 +423,17 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations, strict_posargs_matching, strict_annotation_matching, - always_qualify_annotations, ) - def transform_module_impl( - self, - tree: cst.Module, - ) -> cst.Module: + def transform_module_impl(self, tree: cst.Module) -> cst.Module: """ Collect type annotations from all stubs and apply them to ``tree``. Gather existing imports from ``tree`` so that we don't add duplicate imports. - - Gather global names from ``tree`` so forward references are quoted. 
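# A minimal usage sketch of the workflow described in the docstrings above,
# assuming nothing beyond the public API named there: store a stub in the
# context, then let the visitor collect its annotations and apply them to the
# target tree. The stub and source bodies are illustrative only.
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import ApplyTypeAnnotationsVisitor

context = CodemodContext()
stub = cst.parse_module("def f(x: int) -> str: ...\n")
ApplyTypeAnnotationsVisitor.store_stub_in_context(context, stub)
source = cst.parse_module("def f(x):\n    return str(x)\n")
print(ApplyTypeAnnotationsVisitor(context).transform_module(source).code)
# Expected output: the source with the annotated signature "def f(x: int) -> str:".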
""" import_gatherer = GatherImportsVisitor(CodemodContext()) tree.visit(import_gatherer) - existing_import_names = _get_imported_names(import_gatherer.all_imports) - - global_names_gatherer = GatherGlobalNamesVisitor(CodemodContext()) - tree.visit(global_names_gatherer) - self.global_names = global_names_gatherer.global_names.union( - global_names_gatherer.class_names - ) + existing_import_names = _get_import_names(import_gatherer.all_imports) context_contents = self.context.scratch.get( ApplyTypeAnnotationsVisitor.CONTEXT_KEY @@ -764,7 +445,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): use_future_annotations, strict_posargs_matching, strict_annotation_matching, - always_qualify_annotations, ) = context_contents self.overwrite_existing_annotations = ( self.overwrite_existing_annotations or overwrite_existing_annotations @@ -778,20 +458,20 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.strict_annotation_matching = ( self.strict_annotation_matching or strict_annotation_matching ) - self.always_qualify_annotations = ( - self.always_qualify_annotations or always_qualify_annotations - ) - module_imports = self._get_module_imports(stub, import_gatherer) - visitor = TypeCollector(existing_import_names, module_imports, self.context) + visitor = TypeCollector(existing_import_names, self.context) cst.MetadataWrapper(stub).visit(visitor) - self.annotations.update(visitor.annotations) - - if self.use_future_annotations: - AddImportsVisitor.add_needed_import( - self.context, "__future__", "annotations" - ) - tree_with_imports = AddImportsVisitor(self.context).transform_module(tree) + self.annotations.function_annotations.update(visitor.function_annotations) + self.annotations.attribute_annotations.update(visitor.attribute_annotations) + self.annotations.class_definitions.update(visitor.class_definitions) + tree_with_imports = AddImportsVisitor( + context=self.context, + imports=( + [ImportItem("__future__", "annotations", None)] + if self.use_future_annotations + else () + ), + ).transform_module(tree) tree_with_changes = tree_with_imports.visit(self) # don't modify the imports if we didn't actually add any type information @@ -800,99 +480,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): else: return tree - # helpers for collecting type information from the stub files - - def _get_module_imports( # noqa: C901: too complex - self, stub: cst.Module, existing_import_gatherer: GatherImportsVisitor - ) -> Dict[str, ImportItem]: - """Returns a dict of modules that need to be imported to qualify symbols.""" - # We correlate all imported symbols, e.g. foo.bar.Baz, with a list of module - # and from imports. If the same unqualified symbol is used from different - # modules, we give preference to an explicit from-import if any, and qualify - # everything else by importing the module. - # - # e.g. the following stub: - # import foo as quux - # from bar import Baz as X - # def f(x: X) -> quux.X: ... - # will return {'foo': ImportItem("foo", "quux")}. When the apply type - # annotation visitor hits `quux.X` it will retrieve the canonical name - # `foo.X` and then note that `foo` is in the module imports map, so it will - # leave the symbol qualified. 
- import_gatherer = GatherImportsVisitor(CodemodContext()) - stub.visit(import_gatherer) - symbol_map = import_gatherer.symbol_mapping - existing_import_names = _get_imported_names( - existing_import_gatherer.all_imports - ) - symbol_collector = ImportedSymbolCollector(existing_import_names, self.context) - cst.MetadataWrapper(stub).visit(symbol_collector) - module_imports = {} - for sym, imported_symbols in symbol_collector.imported_symbols.items(): - existing = existing_import_gatherer.symbol_mapping.get(sym) - if existing and any( - s.module_name != existing.module_name for s in imported_symbols - ): - # If a symbol is imported in the main file, we have to qualify - # it when imported from a different module in the stub file. - used = True - elif len(imported_symbols) == 1 and not self.always_qualify_annotations: - # If we have a single use of a new symbol we can from-import it - continue - else: - # There are multiple occurrences in the stub file and none in - # the main file. At least one can be from-imported. - used = False - for imp_sym in imported_symbols: - if not imp_sym.symbol: - continue - imp = symbol_map.get(imp_sym.symbol) - if self.always_qualify_annotations and sym not in existing_import_names: - # Override 'always qualify' if this is a typing import, or - # the main file explicitly from-imports a symbol. - if imp and imp.module_name != "typing": - module_imports[imp.module_name] = imp - else: - imp = symbol_map.get(imp_sym.module_symbol) - if imp: - module_imports[imp.module_name] = imp - elif not used and imp and imp.module_name == imp_sym.module_name: - # We can only import a symbol directly once. - used = True - elif sym in existing_import_names: - if imp: - module_imports[imp.module_name] = imp - else: - imp = symbol_map.get(imp_sym.module_symbol) - if imp: - # imp will be None in corner cases like - # import foo.bar as Baz - # x: Baz - # which is technically valid python but nonsensical as a - # type annotation. Dropping it on the floor for now. - module_imports[imp.module_name] = imp - return module_imports - - # helpers for processing annotation nodes - def _quote_future_annotations(self, annotation: cst.Annotation) -> cst.Annotation: - # TODO: We probably want to make sure references to classes defined in the current - # module come to us fully qualified - so we can do the dequalification here and - # know to look for what is in-scope without also catching builtins like "None" in the - # quoting. This should probably also be extended to handle what imports are in scope, - # as well as subscriptable types. - # Note: We are collecting all imports and passing this to the type collector grabbing - # annotations from the stub file; should consolidate import handling somewhere too. 
- node = annotation.annotation - if ( - isinstance(node, cst.Name) - and (node.value in self.global_names) - and not (node.value in self.visited_classes) - ): - return annotation.with_changes( - annotation=cst.SimpleString(value=f'"{node.value}"') - ) - return annotation - # smart constructors: all applied annotations happen via one of these def _apply_annotation_to_attribute_or_global( @@ -905,11 +492,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): self.annotation_counts.global_annotations += 1 else: self.annotation_counts.attribute_annotations += 1 - return cst.AnnAssign( - cst.Name(name), - self._quote_future_annotations(annotation), - value, - ) + return cst.AnnAssign(cst.Name(name), annotation, value) def _apply_annotation_to_parameter( self, @@ -918,7 +501,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) -> cst.Param: self.annotation_counts.parameter_annotations += 1 return parameter.with_changes( - annotation=self._quote_future_annotations(annotation), + annotation=annotation, ) def _apply_annotation_to_return( @@ -927,9 +510,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): annotation: cst.Annotation, ) -> cst.FunctionDef: self.annotation_counts.return_annotations += 1 - return function_def.with_changes( - returns=self._quote_future_annotations(annotation), - ) + return function_def.with_changes(returns=annotation) # private methods used in the visit and leave methods @@ -937,16 +518,14 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return ".".join(self.qualifier) def _annotate_single_target( - self, - node: cst.Assign, - updated_node: cst.Assign, + self, node: cst.Assign, updated_node: cst.Assign ) -> Union[cst.Assign, cst.AnnAssign]: only_target = node.targets[0].target if isinstance(only_target, (cst.Tuple, cst.List)): for element in only_target.elements: value = element.value name = get_full_name_for_node(value) - if name is not None and name != "_": + if name: self._add_to_toplevel_annotations(name) elif isinstance(only_target, (cst.Subscript)): pass @@ -954,27 +533,25 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): name = get_full_name_for_node(only_target) if name is not None: self.qualifier.append(name) - qualifier_name = self._qualifier_name() - if qualifier_name in self.annotations.attributes and not isinstance( - only_target, (cst.Attribute, cst.Subscript) + if ( + self._qualifier_name() in self.annotations.attribute_annotations + and not isinstance(only_target, cst.Subscript) ): - if qualifier_name not in self.already_annotated: - self.already_annotated.add(qualifier_name) - annotation = self.annotations.attributes[qualifier_name] - self.qualifier.pop() - return self._apply_annotation_to_attribute_or_global( - name=name, - annotation=annotation, - value=node.value, - ) + annotation = self.annotations.attribute_annotations[ + self._qualifier_name() + ] + self.qualifier.pop() + return self._apply_annotation_to_attribute_or_global( + name=name, + annotation=annotation, + value=node.value, + ) else: self.qualifier.pop() return updated_node def _split_module( - self, - module: cst.Module, - updated_module: cst.Module, + self, module: cst.Module, updated_module: cst.Module ) -> Tuple[ List[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], List[Union[cst.SimpleStatementLine, cst.BaseCompoundStatement]], @@ -997,20 +574,15 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): list(updated_module.body[import_add_location:]), ) - def _add_to_toplevel_annotations( - self, - name: 
str, - ) -> None: + def _add_to_toplevel_annotations(self, name: str) -> None: self.qualifier.append(name) - if self._qualifier_name() in self.annotations.attributes: - annotation = self.annotations.attributes[self._qualifier_name()] + if self._qualifier_name() in self.annotations.attribute_annotations: + annotation = self.annotations.attribute_annotations[self._qualifier_name()] self.toplevel_annotations[name] = annotation self.qualifier.pop() def _update_parameters( - self, - annotations: FunctionAnnotation, - updated_node: cst.FunctionDef, + self, annotations: FunctionAnnotation, updated_node: cst.FunctionDef ) -> cst.Parameters: # Update params and default params with annotations # Don't override existing annotations or default values unless asked @@ -1041,7 +613,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): annotated_parameters.append(parameter) return annotated_parameters - return updated_node.params.with_changes( + return annotations.parameters.with_changes( params=update_annotation( updated_node.params.params, annotations.parameters.params, @@ -1091,14 +663,9 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): """Check that function annotations on both signatures are compatible.""" def compatible( - p: Optional[cst.Annotation], - q: Optional[cst.Annotation], + p: Optional[cst.Annotation], q: Optional[cst.Annotation] ) -> bool: - if ( - self.overwrite_existing_annotations - or not _is_non_sentinel(p) - or not _is_non_sentinel(q) - ): + if self.overwrite_existing_annotations or not _is_set(p) or not _is_set(q): return True if not self.strict_annotation_matching: # We will not overwrite clashing annotations, but the signature as a @@ -1106,10 +673,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return True return p.annotation.deep_equals(q.annotation) # pyre-ignore[16] - def match_posargs( - ps: Sequence[cst.Param], - qs: Sequence[cst.Param], - ) -> bool: + def match_posargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: if len(ps) != len(qs): return False for p, q in zip(ps, qs): @@ -1119,10 +683,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return False return True - def match_kwargs( - ps: Sequence[cst.Param], - qs: Sequence[cst.Param], - ) -> bool: + def match_kwargs(ps: Sequence[cst.Param], qs: Sequence[cst.Param]) -> bool: ps_dict = {x.name.value: x for x in ps} qs_dict = {x.name.value: x for x in qs} if set(ps_dict.keys()) != set(qs_dict.keys()): @@ -1132,16 +693,10 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return False return True - def match_star( - p: StarParamType, - q: StarParamType, - ) -> bool: - return _is_non_sentinel(p) == _is_non_sentinel(q) + def match_star(p: StarParamType, q: StarParamType) -> bool: + return _is_set(p) == _is_set(q) - def match_params( - f: cst.FunctionDef, - g: FunctionAnnotation, - ) -> bool: + def match_params(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: p, q = f.params, g.parameters return ( match_posargs(p.params, q.params) @@ -1151,10 +706,7 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): and match_star(p.star_kwarg, q.star_kwarg) ) - def match_return( - f: cst.FunctionDef, - g: FunctionAnnotation, - ) -> bool: + def match_return(f: cst.FunctionDef, g: FunctionAnnotation) -> bool: return compatible(f.returns, g.returns) return match_params(function, annotations) and match_return( @@ -1163,47 +715,28 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): # transform API methods - def visit_ClassDef( - self, - node: 
cst.ClassDef, - ) -> None: + def visit_ClassDef(self, node: cst.ClassDef) -> None: self.qualifier.append(node.name.value) + self.visited_classes.add(node.name.value) def leave_ClassDef( - self, - original_node: cst.ClassDef, - updated_node: cst.ClassDef, + self, original_node: cst.ClassDef, updated_node: cst.ClassDef ) -> cst.ClassDef: - self.visited_classes.add(original_node.name.value) - cls_name = ".".join(self.qualifier) self.qualifier.pop() - definition = self.annotations.class_definitions.get(cls_name) - if definition: - b1 = _find_generic_base(definition) - b2 = _find_generic_base(updated_node) - if b1 and not b2: - new_bases = list(updated_node.bases) + [b1] - self.annotation_counts.typevars_and_generics_added += 1 - return updated_node.with_changes(bases=new_bases) return updated_node - def visit_FunctionDef( - self, - node: cst.FunctionDef, - ) -> bool: + def visit_FunctionDef(self, node: cst.FunctionDef) -> bool: self.qualifier.append(node.name.value) # pyi files don't support inner functions, return False to stop the traversal. return False def leave_FunctionDef( - self, - original_node: cst.FunctionDef, - updated_node: cst.FunctionDef, + self, original_node: cst.FunctionDef, updated_node: cst.FunctionDef ) -> cst.FunctionDef: key = FunctionKey.make(self._qualifier_name(), updated_node.params) self.qualifier.pop() - if key in self.annotations.functions: - function_annotation = self.annotations.functions[key] + if key in self.annotations.function_annotations: + function_annotation = self.annotations.function_annotations[key] # Only add new annotation if: # * we have matching function signatures and # * we are explicitly told to overwrite existing annotations or @@ -1223,42 +756,16 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return updated_node.with_changes(params=new_parameters) return updated_node - def visit_Assign( - self, - node: cst.Assign, - ) -> None: - self.current_assign = node - - @m.call_if_inside(m.Assign()) - @m.visit(m.Call(func=m.Name("TypeVar"))) - def record_typevar( - self, - node: cst.Call, - ) -> None: - # pyre-ignore current_assign is never None here - name = get_full_name_for_node(self.current_assign.targets[0].target) - if name is not None: - # Preserve the whole node, even though we currently just use the - # name, so that we can match bounds and variance at some point and - # determine if two typevars with the same name are indeed the same. - - # pyre-ignore current_assign is never None here - self.typevars[name] = self.current_assign - self.current_assign = None - def leave_Assign( - self, - original_node: cst.Assign, - updated_node: cst.Assign, + self, original_node: cst.Assign, updated_node: cst.Assign ) -> Union[cst.Assign, cst.AnnAssign]: - self.current_assign = None if len(original_node.targets) > 1: for assign in original_node.targets: target = assign.target if isinstance(target, (cst.Name, cst.Attribute)): name = get_full_name_for_node(target) - if name is not None and name != "_": + if name is not None: # Add separate top-level annotations for `a = b = 1` # as `a: int` and `b: int`. 
self._add_to_toplevel_annotations(name) @@ -1267,34 +774,21 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): return self._annotate_single_target(original_node, updated_node) def leave_ImportFrom( - self, - original_node: cst.ImportFrom, - updated_node: cst.ImportFrom, + self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom ) -> cst.ImportFrom: self.import_statements.append(original_node) return updated_node def leave_Module( - self, - original_node: cst.Module, - updated_node: cst.Module, + self, original_node: cst.Module, updated_node: cst.Module ) -> cst.Module: fresh_class_definitions = [ definition for name, definition in self.annotations.class_definitions.items() if name not in self.visited_classes ] - - # NOTE: The entire change will also be abandoned if - # self.annotation_counts is all 0s, so if adding any new category make - # sure to record it there. - if not ( - self.toplevel_annotations - or fresh_class_definitions - or self.annotations.typevars - ): + if not self.toplevel_annotations and not fresh_class_definitions: return updated_node - toplevel_statements = [] # First, find the insertion point for imports statements_before_imports, statements_after_imports = self._split_module( @@ -1312,18 +806,6 @@ class ApplyTypeAnnotationsVisitor(ContextAwareTransformer): ) toplevel_statements.append(cst.SimpleStatementLine([annotated_assign])) - # TypeVar definitions could be scattered through the file, so do not - # attempt to put new ones with existing ones, just add them at the top. - typevars = { - k: v for k, v in self.annotations.typevars.items() if k not in self.typevars - } - if typevars: - for var, stmt in typevars.items(): - toplevel_statements.append(cst.Newline()) - toplevel_statements.append(stmt) - self.annotation_counts.typevars_and_generics_added += 1 - toplevel_statements.append(cst.Newline()) - self.annotation_counts.classes_added = len(fresh_class_definitions) toplevel_statements.extend(fresh_class_definitions) diff --git a/libcst/codemod/visitors/_gather_exports.py b/libcst/codemod/visitors/_gather_exports.py index bb1c8894..a35b389a 100644 --- a/libcst/codemod/visitors/_gather_exports.py +++ b/libcst/codemod/visitors/_gather_exports.py @@ -140,6 +140,6 @@ class GatherExportsVisitor(ContextAwareVisitor): ) -> None: if self._in_assigned_export: name = node.evaluated_value - if not isinstance(name, str): + if name is None: return self.explicit_exported_objects.add(name) diff --git a/libcst/codemod/visitors/_gather_global_names.py b/libcst/codemod/visitors/_gather_global_names.py deleted file mode 100644 index c4a5d57d..00000000 --- a/libcst/codemod/visitors/_gather_global_names.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from typing import Set - -import libcst -from libcst.codemod._context import CodemodContext -from libcst.codemod._visitor import ContextAwareVisitor - - -class GatherGlobalNamesVisitor(ContextAwareVisitor): - """ - Gathers all globally accessible names defined in a module and stores them as - attributes on the instance. - Intended to be instantiated and passed to a :class:`~libcst.Module` - :meth:`~libcst.CSTNode.visit` method in order to gather up information about - names defined on a module. Note that this is not a substitute for scope - analysis or qualified name support. 
Please see :ref:`libcst-scope-tutorial` - for a more robust way of determining the qualified name and definition for - an arbitrary node. - Names that are globally accessible through imports are currently not included - but can be retrieved with GatherImportsVisitor. - - After visiting a module the following attributes will be populated: - - global_names - A sequence of strings representing global variables defined in the module - toplevel. - class_names - A sequence of strings representing classes defined in the module toplevel. - function_names - A sequence of strings representing functions defined in the module toplevel. - - """ - - def __init__(self, context: CodemodContext) -> None: - super().__init__(context) - self.global_names: Set[str] = set() - self.class_names: Set[str] = set() - self.function_names: Set[str] = set() - # Track scope nesting - self.scope_depth: int = 0 - - def visit_ClassDef(self, node: libcst.ClassDef) -> None: - if self.scope_depth == 0: - self.class_names.add(node.name.value) - self.scope_depth += 1 - - def leave_ClassDef(self, original_node: libcst.ClassDef) -> None: - self.scope_depth -= 1 - - def visit_FunctionDef(self, node: libcst.FunctionDef) -> None: - if self.scope_depth == 0: - self.function_names.add(node.name.value) - self.scope_depth += 1 - - def leave_FunctionDef(self, original_node: libcst.FunctionDef) -> None: - self.scope_depth -= 1 - - def visit_Assign(self, node: libcst.Assign) -> None: - if self.scope_depth != 0: - return - for assign_target in node.targets: - target = assign_target.target - if isinstance(target, libcst.Name): - self.global_names.add(target.value) - - def visit_AnnAssign(self, node: libcst.AnnAssign) -> None: - if self.scope_depth != 0: - return - target = node.target - if isinstance(target, libcst.Name): - self.global_names.add(target.value) diff --git a/libcst/codemod/visitors/_gather_imports.py b/libcst/codemod/visitors/_gather_imports.py index 6b187c53..5d5a50f9 100644 --- a/libcst/codemod/visitors/_gather_imports.py +++ b/libcst/codemod/visitors/_gather_imports.py @@ -8,87 +8,10 @@ from typing import Dict, List, Sequence, Set, Tuple, Union import libcst from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareVisitor -from libcst.codemod.visitors._imports import ImportItem -from libcst.helpers import get_absolute_module_from_package_for_import +from libcst.helpers import get_absolute_module_for_import -class _GatherImportsMixin(ContextAwareVisitor): - """ - A Mixin class for tracking visited imports. - """ - - def __init__(self, context: CodemodContext) -> None: - super().__init__(context) - # Track the available imports in this transform - self.module_imports: Set[str] = set() - self.object_mapping: Dict[str, Set[str]] = {} - # Track the aliased imports in this transform - self.module_aliases: Dict[str, str] = {} - self.alias_mapping: Dict[str, List[Tuple[str, str]]] = {} - # Track the import for every symbol introduced into the module - self.symbol_mapping: Dict[str, ImportItem] = {} - - def _handle_Import(self, node: libcst.Import) -> None: - for name in node.names: - alias = name.evaluated_alias - imp = ImportItem(name.evaluated_name, alias=alias) - if alias is not None: - # Track this as an aliased module - self.module_aliases[name.evaluated_name] = alias - self.symbol_mapping[alias] = imp - else: - # Get the module we're importing as a string. 
- self.module_imports.add(name.evaluated_name) - self.symbol_mapping[name.evaluated_name] = imp - - def _handle_ImportFrom(self, node: libcst.ImportFrom) -> None: - # Get the module we're importing as a string. - module = get_absolute_module_from_package_for_import( - self.context.full_package_name, node - ) - if module is None: - # Can't get the absolute import from relative, so we can't - # support this. - return - nodenames = node.names - if isinstance(nodenames, libcst.ImportStar): - # We cover everything, no need to bother tracking other things - self.object_mapping[module] = set("*") - return - elif isinstance(nodenames, Sequence): - # Get the list of imports we're aliasing in this import - new_aliases = [ - (ia.evaluated_name, ia.evaluated_alias) - for ia in nodenames - if ia.asname is not None - ] - if new_aliases: - if module not in self.alias_mapping: - self.alias_mapping[module] = [] - # pyre-ignore We know that aliases are not None here. - self.alias_mapping[module].extend(new_aliases) - - # Get the list of imports we're importing in this import - new_objects = {ia.evaluated_name for ia in nodenames if ia.asname is None} - if new_objects: - if module not in self.object_mapping: - self.object_mapping[module] = set() - - # Make sure that we don't add to a '*' module - if "*" in self.object_mapping[module]: - self.object_mapping[module] = set("*") - return - - self.object_mapping[module].update(new_objects) - for ia in nodenames: - imp = ImportItem( - module, obj_name=ia.evaluated_name, alias=ia.evaluated_alias - ) - key = ia.evaluated_alias or ia.evaluated_name - self.symbol_mapping[key] = imp - - -class GatherImportsVisitor(_GatherImportsMixin): +class GatherImportsVisitor(ContextAwareVisitor): """ Gathers all imports in a module and stores them as attributes on the instance. Intended to be instantiated and passed to a :class:`~libcst.Module` @@ -129,15 +52,65 @@ class GatherImportsVisitor(_GatherImportsMixin): def __init__(self, context: CodemodContext) -> None: super().__init__(context) + # Track the available imports in this transform + self.module_imports: Set[str] = set() + self.object_mapping: Dict[str, Set[str]] = {} + # Track the aliased imports in this transform + self.module_aliases: Dict[str, str] = {} + self.alias_mapping: Dict[str, List[Tuple[str, str]]] = {} # Track all of the imports found in this transform self.all_imports: List[Union[libcst.Import, libcst.ImportFrom]] = [] def visit_Import(self, node: libcst.Import) -> None: # Track this import statement for later analysis. self.all_imports.append(node) - self._handle_Import(node) + + for name in node.names: + alias = name.evaluated_alias + if alias is not None: + # Track this as an aliased module + self.module_aliases[name.evaluated_name] = alias + else: + # Get the module we're importing as a string. + self.module_imports.add(name.evaluated_name) def visit_ImportFrom(self, node: libcst.ImportFrom) -> None: # Track this import statement for later analysis. self.all_imports.append(node) - self._handle_ImportFrom(node) + + # Get the module we're importing as a string. + module = get_absolute_module_for_import(self.context.full_module_name, node) + if module is None: + # Can't get the absolute import from relative, so we can't + # support this. 
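# A small sketch of the gathering behavior described above, using an arbitrary
# example module; the attributes printed are the ones the visitor populates.
import libcst as cst
from libcst.codemod import CodemodContext
from libcst.codemod.visitors import GatherImportsVisitor

gatherer = GatherImportsVisitor(CodemodContext())
tree = cst.parse_module("import os\nimport numpy as np\nfrom typing import List\n")
tree.visit(gatherer)
print(gatherer.module_imports)   # {"os"}
print(gatherer.module_aliases)   # {"numpy": "np"}
print(gatherer.object_mapping)   # {"typing": {"List"}}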
+ return + nodenames = node.names + if isinstance(nodenames, libcst.ImportStar): + # We cover everything, no need to bother tracking other things + self.object_mapping[module] = set("*") + return + elif isinstance(nodenames, Sequence): + # Get the list of imports we're aliasing in this import + new_aliases = [ + (ia.evaluated_name, ia.evaluated_alias) + for ia in nodenames + if ia.asname is not None + ] + if new_aliases: + if module not in self.alias_mapping: + self.alias_mapping[module] = [] + # pyre-ignore We know that aliases are not None here. + self.alias_mapping[module].extend(new_aliases) + + # Get the list of imports we're importing in this import + new_objects = {ia.evaluated_name for ia in nodenames if ia.asname is None} + if new_objects: + if module not in self.object_mapping: + self.object_mapping[module] = set() + + # Make sure that we don't add to a '*' module + if "*" in self.object_mapping[module]: + self.object_mapping[module] = set("*") + return + + self.object_mapping[module].update(new_objects) diff --git a/libcst/codemod/visitors/_gather_string_annotation_names.py b/libcst/codemod/visitors/_gather_string_annotation_names.py index b7268ffc..0f1b926b 100644 --- a/libcst/codemod/visitors/_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/_gather_string_annotation_names.py @@ -44,11 +44,6 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): def leave_Annotation(self, original_node: cst.Annotation) -> None: self._annotation_stack.pop() - def visit_Subscript(self, node: cst.Subscript) -> bool: - qnames = self.get_metadata(QualifiedNameProvider, node) - # A Literal["foo"] should not be interpreted as a use of the symbol "foo". - return not any(qn.name == "typing.Literal" for qn in qnames) - def visit_Call(self, node: cst.Call) -> bool: qnames = self.get_metadata(QualifiedNameProvider, node) if any(qn.name in self._typing_functions for qn in qnames): @@ -76,11 +71,7 @@ class GatherNamesFromStringAnnotationsVisitor(ContextAwareVisitor): value = node.evaluated_value if value is None: return - try: - mod = cst.parse_module(value) - except cst.ParserSyntaxError: - # Not all strings inside a type annotation are meant to be valid Python code. - return + mod = cst.parse_module(value) extracted_nodes = m.extractall( mod, m.Name( diff --git a/libcst/codemod/visitors/_gather_unused_imports.py b/libcst/codemod/visitors/_gather_unused_imports.py index 01243cae..d6211509 100644 --- a/libcst/codemod/visitors/_gather_unused_imports.py +++ b/libcst/codemod/visitors/_gather_unused_imports.py @@ -108,7 +108,7 @@ class GatherUnusedImportsVisitor(ContextAwareVisitor): Override this in a subclass for additional filtering. """ unused_imports = set() - for alias, parent in candidates: + for (alias, parent) in candidates: scope = self.get_metadata(ScopeProvider, parent) if scope is None: continue diff --git a/libcst/codemod/visitors/_imports.py b/libcst/codemod/visitors/_imports.py index f3c1c305..5a703112 100644 --- a/libcst/codemod/visitors/_imports.py +++ b/libcst/codemod/visitors/_imports.py @@ -6,7 +6,7 @@ from dataclasses import dataclass, replace from typing import Optional -from libcst.helpers import get_absolute_module_from_package +from libcst.helpers import get_absolute_module @dataclass(frozen=True) @@ -31,15 +31,13 @@ class ImportItem: def module(self) -> str: return "." 
* self.relative + self.module_name - def resolve_relative(self, package_name: Optional[str]) -> "ImportItem": + def resolve_relative(self, base_module: Optional[str]) -> "ImportItem": """Return an ImportItem with an absolute module name if possible.""" mod = self # `import ..a` -> `from .. import a` if mod.relative and mod.obj_name is None: mod = replace(mod, module_name="", obj_name=mod.module_name) - if package_name is None: + if base_module is None: return mod - m = get_absolute_module_from_package( - package_name, mod.module_name or None, self.relative - ) + m = get_absolute_module(base_module, mod.module_name or None, self.relative) return mod if m is None else replace(mod, module_name=m, relative=0) diff --git a/libcst/codemod/visitors/_remove_imports.py b/libcst/codemod/visitors/_remove_imports.py index b625ee60..4c53ef19 100644 --- a/libcst/codemod/visitors/_remove_imports.py +++ b/libcst/codemod/visitors/_remove_imports.py @@ -6,14 +6,10 @@ from typing import Any, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union import libcst as cst -from libcst import CSTLogicError from libcst.codemod._context import CodemodContext from libcst.codemod._visitor import ContextAwareTransformer, ContextAwareVisitor from libcst.codemod.visitors._gather_unused_imports import GatherUnusedImportsVisitor -from libcst.helpers import ( - get_absolute_module_from_package_for_import, - get_full_name_for_node, -) +from libcst.helpers import get_absolute_module_for_import, get_full_name_for_node from libcst.metadata import Assignment, ProviderT, ScopeProvider @@ -42,11 +38,11 @@ class RemovedNodeVisitor(ContextAwareVisitor): # We don't handle removing this, so ignore it. return - module_name = get_absolute_module_from_package_for_import( - self.context.full_package_name, import_node + module_name = get_absolute_module_for_import( + self.context.full_module_name, import_node ) if module_name is None: - raise ValueError("Cannot look up absolute module from relative import!") + raise Exception("Cannot look up absolute module from relative import!") # We know any local names will refer to this as an alias if # there is one, and as the original name if there is not one @@ -73,9 +69,7 @@ class RemovedNodeVisitor(ContextAwareVisitor): # Look up the scope for this node, remove the import that caused it to exist. metadata_wrapper = self.context.wrapper if metadata_wrapper is None: - raise ValueError( - "Cannot look up import, metadata is not computed for node!" - ) + raise Exception("Cannot look up import, metadata is not computed for node!") scope_provider = metadata_wrapper.resolve(ScopeProvider) try: scope = scope_provider[node] @@ -188,7 +182,7 @@ class RemoveImportsVisitor(ContextAwareTransformer): ) -> List[Tuple[str, Optional[str], Optional[str]]]: unused_imports = context.scratch.get(RemoveImportsVisitor.CONTEXT_KEY, []) if not isinstance(unused_imports, list): - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") return unused_imports @staticmethod @@ -254,11 +248,9 @@ class RemoveImportsVisitor(ContextAwareTransformer): if isinstance(names, cst.ImportStar): # We don't handle removing this, so ignore it. 
return - module_name = get_absolute_module_from_package_for_import( - context.full_package_name, node - ) + module_name = get_absolute_module_for_import(context.full_module_name, node) if module_name is None: - raise ValueError("Cannot look up absolute module from relative import!") + raise Exception("Cannot look up absolute module from relative import!") for import_alias in names: RemoveImportsVisitor.remove_unused_import( context, @@ -421,8 +413,8 @@ class RemoveImportsVisitor(ContextAwareTransformer): return updated_node # Make sure we actually know the absolute module. - module_name = get_absolute_module_from_package_for_import( - self.context.full_package_name, updated_node + module_name = get_absolute_module_for_import( + self.context.full_module_name, updated_node ) if module_name is None or module_name not in self.unused_obj_imports: # This node isn't on our list of todos, so let's bail. diff --git a/libcst/codemod/visitors/tests/test_add_imports.py b/libcst/codemod/visitors/tests/test_add_imports.py index 613da907..6a88b335 100644 --- a/libcst/codemod/visitors/tests/test_add_imports.py +++ b/libcst/codemod/visitors/tests/test_add_imports.py @@ -8,6 +8,7 @@ from libcst.codemod.visitors import AddImportsVisitor, ImportItem class TestAddImportsCodemod(CodemodTest): + TRANSFORM = AddImportsVisitor def test_noop(self) -> None: @@ -589,9 +590,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("a.b.c", "D", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_add_object_relative_modify_simple(self) -> None: @@ -622,9 +621,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("a.b.c", "D", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_import_order(self) -> None: @@ -647,9 +644,7 @@ class TestAddImportsCodemod(CodemodTest): ImportItem("a", "c", None), ImportItem("a", "d", "x"), ], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_add_explicit_relative(self) -> None: @@ -764,9 +759,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_add_object_explicit_relative_modify_simple(self) -> None: @@ -797,9 +790,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_add_object_resolve_explicit_relative_modify_simple(self) -> None: @@ -830,9 +821,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("c", "D", None, 2)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_add_object_resolve_dotted_relative_modify_simple(self) -> None: @@ -863,9 +852,7 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("..c", "D", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", 
full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_import_in_docstring_module(self) -> None: @@ -886,143 +873,5 @@ class TestAddImportsCodemod(CodemodTest): before, after, [ImportItem("__future__", "annotations", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), - ) - - def test_import_in_module_with_standalone_string_not_a_docstring( - self, - ) -> None: - """ - The import should be added after the __future__ imports. - """ - before = """ - from __future__ import annotations - from __future__ import division - - '''docstring.''' - def func(): - pass - """ - after = """ - from __future__ import annotations - from __future__ import division - import typing - - '''docstring.''' - def func(): - pass - """ - - self.assertCodemod( - before, - after, - [ImportItem("typing", None, None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), - ) - - def test_add_at_first_block(self) -> None: - """ - Should add the import only at the end of the first import block. - """ - - before = """ - import a - import b - - e() - - import c - import d - """ - - after = """ - import a - import b - import e - - e() - - import c - import d - """ - - self.assertCodemod(before, after, [ImportItem("e", None, None)]) - - def test_add_no_import_block_before_statement(self) -> None: - """ - Should add the import before the call. - """ - - before = """ - '''docstring''' - e() - import a - import b - """ - - after = """ - '''docstring''' - import c - - e() - import a - import b - """ - - self.assertCodemod(before, after, [ImportItem("c", None, None)]) - - def test_do_not_add_existing(self) -> None: - """ - Should not add the new object import at existing import since it's not at the top - """ - - before = """ - '''docstring''' - e() - import a - import b - from c import f - """ - - after = """ - '''docstring''' - from c import e - - e() - import a - import b - from c import f - """ - - self.assertCodemod(before, after, [ImportItem("c", "e", None)]) - - def test_add_existing_at_top(self) -> None: - """ - Should add new import at exisitng from import at top - """ - - before = """ - '''docstring''' - from c import d - e() - import a - import b - from c import f - """ - - after = """ - '''docstring''' - from c import e, x, d - e() - import a - import b - from c import f - """ - - self.assertCodemod( - before, after, [ImportItem("c", "x", None), ImportItem("c", "e", None)] + context_override=CodemodContext(full_module_name="a.b.foobar"), ) diff --git a/libcst/codemod/visitors/tests/test_apply_type_annotations.py b/libcst/codemod/visitors/tests/test_apply_type_annotations.py index e7b25124..150e996a 100644 --- a/libcst/codemod/visitors/tests/test_apply_type_annotations.py +++ b/libcst/codemod/visitors/tests/test_apply_type_annotations.py @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) 2016-present, Meta Platforms, Inc. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
@@ -61,28 +61,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): ) self.assertCodemod(before, after, context_override=context) - def run_test_case_twice( - self, - stub: str, - before: str, - after: str, - ) -> None: - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - r1 = ApplyTypeAnnotationsVisitor(context).transform_module( - parse_module(textwrap.dedent(before.rstrip())) - ) - - context = CodemodContext() - ApplyTypeAnnotationsVisitor.store_stub_in_context( - context, parse_module(textwrap.dedent(stub.rstrip())) - ) - r2 = ApplyTypeAnnotationsVisitor(context).transform_module(r1) - assert r1.code == textwrap.dedent(after.rstrip()) - assert r2.code == textwrap.dedent(after.rstrip()) - @data_provider( { "simple": ( @@ -176,29 +154,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): x2: Optional[T2] = None """, ), - "splitting_multi_assigns": ( - """ - a: str = ... - x: int = ... - y: int = ... - _: str = ... - z: str = ... - """, - """ - a = 'a' - x, y = 1, 2 - _, z = 'hello world'.split() - """, - """ - x: int - y: int - z: str - - a: str = 'a' - x, y = 1, 2 - _, z = 'hello world'.split() - """, - ), } ) def test_annotate_globals(self, stub: str, before: str, after: str) -> None: @@ -314,71 +269,10 @@ class TestApplyAnnotationsVisitor(CodemodTest): return returns_baz() """, ), - "with_as_import": ( - """ - from bar import A as B - - def foo(x: B): ... - """, - """ - def foo(x): - pass - """, - """ - from bar import A as B - - def foo(x: B): - pass - """, - ), - "with_conflicting_imported_symbols": ( - """ - import a.foo as bar - from b.c import Baz as B - import d - - def f(a: d.A, b: B) -> bar.B: ... - """, - """ - def f(a, b): - pass - """, - """ - import a.foo as bar - from b.c import Baz as B - from d import A - - def f(a: A, b: B) -> bar.B: - pass - """, - ), - "with_conflicts_between_imported_and_existing_symbols": ( - """ - from a import A - from b import B - - def f(x: A, y: B) -> None: ... - """, - """ - from b import A, B - - def f(x, y): - y = A(x) - z = B(y) - """, - """ - from b import A, B - import a - - def f(x: a.A, y: B) -> None: - y = A(x) - z = B(y) - """, - ), "with_nested_import": ( """ def foo(x: django.http.response.HttpResponse) -> str: - ... + pass """, """ def foo(x) -> str: @@ -491,7 +385,7 @@ class TestApplyAnnotationsVisitor(CodemodTest): ), "deeply_nested_example_with_multiline_annotation": ( """ - def foo(x: int) -> Union[ + def foo(x: int)-> Union[ Coroutine[Any, Any, django.http.response.HttpResponse], str ]: ... @@ -574,31 +468,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): return respond(r, b) """, ), - "with_variadic_arguments": ( - """ - def incomplete_stubs_with_stars( - x: int, - *args, - **kwargs, - ) -> None: ... 
- """, - """ - def incomplete_stubs_with_stars( - x, - *args: P.args, - **kwargs: P.kwargs, - ): - pass - """, - """ - def incomplete_stubs_with_stars( - x: int, - *args: P.args, - **kwargs: P.kwargs, - ) -> None: - pass - """, - ), # test cases named with the REQUIRES_PREEXISTING prefix are verifying # that certain special cases work if the stub and the existing code # happen to align well, but none of these cases are guaranteed to work @@ -952,120 +821,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): def test_adding_typed_dicts(self, stub: str, before: str, after: str) -> None: self.run_simple_test_case(stub=stub, before=before, after=after) - @data_provider( - { - "insert_new_TypeVar_not_in_source_file": ( - """ - from typing import Dict, TypeVar - - _KT = TypeVar('_KT') - _VT = TypeVar('_VT') - - class UserDict(Dict[_KT, _VT]): - def __init__(self, initialdata: Dict[_KT, _VT] = ...): ... - """, - """ - class UserDict: - def __init__(self, initialdata = None): - pass - """, - """ - from typing import Dict, TypeVar - - _KT = TypeVar('_KT') - _VT = TypeVar('_VT') - - class UserDict: - def __init__(self, initialdata: Dict[_KT, _VT] = None): - pass - """, - ), - "insert_only_used_TypeVar_not_already_in_source": ( - """ - from typing import Dict, TypeVar - - K = TypeVar('K') - V = TypeVar('V') - X = TypeVar('X') - - class UserDict(Dict[K, V]): - def __init__(self, initialdata: Dict[K, V] = ...): ... - """, - """ - from typing import TypeVar - - V = TypeVar('V') - - class UserDict: - def __init__(self, initialdata = None): - pass - - def f(x: V) -> V: - pass - """, - """ - from typing import Dict, TypeVar - - K = TypeVar('K') - - V = TypeVar('V') - - class UserDict: - def __init__(self, initialdata: Dict[K, V] = None): - pass - - def f(x: V) -> V: - pass - """, - ), - "insert_Generic_base_class": ( - """ - from typing import TypeVar - - T = TypeVar('T') - X = TypeVar('X') - - class B(A, Generic[T]): - def f(self, x: T) -> T: ... - """, - """ - from typing import TypeVar - - V = TypeVar('V') - - def f(x: V) -> V: - pass - - class A: - pass - - class B(A): - def f(self, x): - pass - """, - """ - from typing import TypeVar - - T = TypeVar('T') - - V = TypeVar('V') - - def f(x: V) -> V: - pass - - class A: - pass - - class B(A, Generic[T]): - def f(self, x: T) -> T: - pass - """, - ), - } - ) - def test_adding_typevars(self, stub: str, before: str, after: str) -> None: - self.run_simple_test_case(stub=stub, before=before, after=after) - @data_provider( { "required_positional_only_args": ( @@ -1141,201 +896,6 @@ class TestApplyAnnotationsVisitor(CodemodTest): overwrite_existing_annotations=True, ) - @data_provider( - { - "pep_604": ( - """ - def f(a: int | str, b: int | list[int | list[int | str]]) -> str: ... - """, - """ - def f(a, b): - return 'hello' - """, - """ - def f(a: int | str, b: int | list[int | list[int | str]]) -> str: - return 'hello' - """, - ), - "pep_604_import": ( - """ - from typing import Callable - from collections.abc import Sequence - def f(a: int | str, b: int | list[int | Callable[[str], Sequence]]) -> str: ... 
- """, - """ - def f(a, b): - return 'hello' - """, - """ - from collections.abc import Sequence - from typing import Callable - - def f(a: int | str, b: int | list[int | Callable[[str], Sequence]]) -> str: - return 'hello' - """, - ), - } - ) - def test_annotate_functions_pep_604( - self, stub: str, before: str, after: str - ) -> None: - self.run_test_case_with_flags( - stub=stub, - before=before, - after=after, - overwrite_existing_annotations=True, - ) - - @data_provider( - { - "import_inside_list": ( - """ - from typing import Callable - from collections.abc import Sequence - def f(a: Callable[[Sequence[int]], int], b: int) -> str: ... - """, - """ - def f(a, b): - return 'hello' - """, - """ - from collections.abc import Sequence - from typing import Callable - - def f(a: Callable[[Sequence[int]], int], b: int) -> str: - return 'hello' - """, - ), - } - ) - def test_annotate_function_nested_imports( - self, stub: str, before: str, after: str - ) -> None: - self.run_test_case_with_flags( - stub=stub, - before=before, - after=after, - overwrite_existing_annotations=True, - ) - - @data_provider( - { - "return_self": ( - """ - class Foo: - def f(self) -> Foo: ... - """, - """ - class Foo: - def f(self): - return self - """, - """ - class Foo: - def f(self) -> "Foo": - return self - """, - ), - "return_forward_reference": ( - """ - class Foo: - def f(self) -> Bar: ... - - class Bar: - ... - """, - """ - class Foo: - def f(self): - return Bar() - - class Bar: - pass - """, - """ - class Foo: - def f(self) -> "Bar": - return Bar() - - class Bar: - pass - """, - ), - "return_backward_reference": ( - """ - class Bar: - ... - - class Foo: - def f(self) -> Bar: ... - """, - """ - class Bar: - pass - - class Foo: - def f(self): - return Bar() - """, - """ - class Bar: - pass - - class Foo: - def f(self) -> Bar: - return Bar() - """, - ), - "return_undefined_name": ( - """ - class Foo: - def f(self) -> Bar: ... - """, - """ - class Foo: - def f(self): - return self - """, - """ - class Foo: - def f(self) -> Bar: - return self - """, - ), - "parameter_forward_reference": ( - """ - def f(input: Bar) -> None: ... - - class Bar: - ... - """, - """ - def f(input): - pass - - class Bar: - pass - """, - """ - def f(input: "Bar") -> None: - pass - - class Bar: - pass - """, - ), - } - ) - def test_annotate_with_forward_references( - self, stub: str, before: str, after: str - ) -> None: - self.run_test_case_with_flags( - stub=stub, - before=before, - after=after, - overwrite_existing_annotations=True, - ) - @data_provider( { "fully_annotated_with_untyped_stub": ( @@ -1775,241 +1335,3 @@ class TestApplyAnnotationsVisitor(CodemodTest): self.assertEqual( any_changes_applied, visitor.annotation_counts.any_changes_applied() ) - - @data_provider( - { - "always_qualify": ( - """ - from a import A - import b - def f(x: A, y: b.B) -> None: ... - """, - """ - def f(x, y): - pass - """, - """ - import a - import b - - def f(x: a.A, y: b.B) -> None: - pass - """, - ), - "never_qualify_typing": ( - """ - from a import A - from b import B - from typing import List - - def f(x: List[A], y: B[A]) -> None: ... - """, - """ - def f(x, y): - pass - """, - """ - import a - import b - from typing import List - - def f(x: List[a.A], y: b.B[a.A]) -> None: - pass - """, - ), - "preserve_explicit_from_import": ( - """ - from a import A - import b - def f(x: A, y: b.B) -> None: ... 
- """, - """ - from b import B - def f(x, y): - pass - """, - """ - from b import B - import a - - def f(x: a.A, y: B) -> None: - pass - """, - ), - } - ) - def test_signature_matching_with_always_qualify( - self, stub: str, before: str, after: str - ) -> None: - self.run_test_case_with_flags( - stub=stub, before=before, after=after, always_qualify_annotations=True - ) - - @data_provider( - { - "attribute": ( - """ - class C: - x: int - """, - """ - class C: - x = 0 - C.x = 1 - """, - """ - class C: - x: int = 0 - C.x = 1 - """, - ), - "subscript": ( - """ - d: dict[str, int] - """, - """ - d = {} - d["k"] = 0 - """, - """ - d: dict[str, int] = {} - d["k"] = 0 - """, - ), - "starred": ( - """ - a: int - b: list[int] - """, - """ - a, *b = [1, 2, 3] - """, - """ - a: int - b: list[int] - - a, *b = [1, 2, 3] - """, - ), - "name": ( - """ - a: int - """, - """ - a = 0 - """, - """ - a: int = 0 - """, - ), - "list": ( - """ - a: int - """, - """ - [a] = [0] - """, - """ - a: int - - [a] = [0] - """, - ), - "tuple": ( - """ - a: int - """, - """ - (a,) = [0] - """, - """ - a: int - - (a,) = [0] - """, - ), - } - ) - def test_valid_assign_expressions(self, stub: str, before: str, after: str) -> None: - self.run_simple_test_case(stub=stub, before=before, after=after) - - @data_provider( - { - "toplevel": ( - """ - x: int - """, - """ - x = 1 - x = 2 - """, - """ - x: int = 1 - x = 2 - """, - ), - "class": ( - """ - class A: - x: int - """, - """ - class A: - x = 1 - x = 2 - """, - """ - class A: - x: int = 1 - x = 2 - """, - ), - "mixed": ( - """ - x: int - class A: - x: int - """, - """ - x = 1 - class A: - x = 1 - x = 2 - """, - """ - x: int = 1 - class A: - x: int = 1 - x = 2 - """, - ), - } - ) - def test_no_duplicate_annotations(self, stub: str, before: str, after: str) -> None: - self.run_simple_test_case(stub=stub, before=before, after=after) - - @data_provider( - { - "qualifier_jank": ( - """ - from module.submodule import B - M: B - class Foo: ... - """, - """ - from module import B - M = B() - class Foo: pass - """, - """ - from module import B - import module.submodule - - M: module.submodule.B = B() - class Foo: pass - """, - ), - } - ) - def test_idempotent(self, stub: str, before: str, after: str) -> None: - self.run_test_case_twice(stub=stub, before=before, after=after) diff --git a/libcst/codemod/visitors/tests/test_gather_global_names.py b/libcst/codemod/visitors/tests/test_gather_global_names.py deleted file mode 100644 index 8a7a7b8b..00000000 --- a/libcst/codemod/visitors/tests/test_gather_global_names.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
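# Illustrative sketch (assuming the pre-removal GatherGlobalNamesVisitor API
# exercised by the tests deleted in this file):
#
#     from libcst import parse_module
#     from libcst.codemod import CodemodContext
#     from libcst.codemod.visitors import GatherGlobalNamesVisitor
#
#     visitor = GatherGlobalNamesVisitor(CodemodContext(full_module_name="a.b.foobar"))
#     parse_module("x = 1\ndef foo(): pass\nclass Foo: pass\n").visit(visitor)
#     # visitor.global_names == {"x"}, visitor.function_names == {"foo"},
#     # visitor.class_names == {"Foo"}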
-# -from libcst import parse_module -from libcst.codemod import CodemodContext, CodemodTest -from libcst.codemod.visitors import GatherGlobalNamesVisitor -from libcst.testing.utils import UnitTest - - -class TestGatherGlobalNamesVisitor(UnitTest): - def gather_global_names(self, code: str) -> GatherGlobalNamesVisitor: - transform_instance = GatherGlobalNamesVisitor( - CodemodContext(full_module_name="a.b.foobar") - ) - input_tree = parse_module(CodemodTest.make_fixture_data(code)) - input_tree.visit(transform_instance) - return transform_instance - - def test_gather_nothing(self) -> None: - code = """ - from a import b - b() - """ - gatherer = self.gather_global_names(code) - self.assertEqual(gatherer.global_names, set()) - self.assertEqual(gatherer.class_names, set()) - self.assertEqual(gatherer.function_names, set()) - - def test_globals(self) -> None: - code = """ - x = 1 - y = 2 - def foo(): pass - class Foo: pass - """ - gatherer = self.gather_global_names(code) - self.assertEqual(gatherer.global_names, {"x", "y"}) - self.assertEqual(gatherer.class_names, {"Foo"}) - self.assertEqual(gatherer.function_names, {"foo"}) - - def test_omit_nested(self) -> None: - code = """ - def foo(): - x = 1 - - class Foo: - def method(self): pass - """ - gatherer = self.gather_global_names(code) - self.assertEqual(gatherer.global_names, set()) - self.assertEqual(gatherer.class_names, {"Foo"}) - self.assertEqual(gatherer.function_names, {"foo"}) diff --git a/libcst/codemod/visitors/tests/test_gather_imports.py b/libcst/codemod/visitors/tests/test_gather_imports.py index 4fbdbad2..3e5d6b99 100644 --- a/libcst/codemod/visitors/tests/test_gather_imports.py +++ b/libcst/codemod/visitors/tests/test_gather_imports.py @@ -12,7 +12,7 @@ from libcst.testing.utils import UnitTest class TestGatherImportsVisitor(UnitTest): def gather_imports(self, code: str) -> GatherImportsVisitor: transform_instance = GatherImportsVisitor( - CodemodContext(full_module_name="a.b.foobar", full_package_name="a.b") + CodemodContext(full_module_name="a.b.foobar") ) input_tree = parse_module(CodemodTest.make_fixture_data(code)) input_tree.visit(transform_instance) diff --git a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py index d3c622a3..f8a11fcc 100644 --- a/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py +++ b/libcst/codemod/visitors/tests/test_gather_string_annotation_names.py @@ -80,14 +80,3 @@ class TestGatherNamesFromStringAnnotationsVisitor(UnitTest): visitor.names, {"api", "api.http_exceptions", "api.http_exceptions.HttpException"}, ) - - def test_literals(self) -> None: - visitor = self.gather_names( - """ - from typing import Literal - a: Literal["in"] - b: list[Literal["1x"]] - c: Literal["Any"] - """ - ) - self.assertEqual(visitor.names, set()) diff --git a/libcst/codemod/visitors/tests/test_remove_imports.py b/libcst/codemod/visitors/tests/test_remove_imports.py index 6e51c515..93e5d7a5 100644 --- a/libcst/codemod/visitors/tests/test_remove_imports.py +++ b/libcst/codemod/visitors/tests/test_remove_imports.py @@ -17,6 +17,7 @@ from libcst.testing.utils import data_provider class TestRemoveImportsCodemod(CodemodTest): + TRANSFORM = RemoveImportsVisitor def test_noop(self) -> None: @@ -418,9 +419,7 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.c", "qux", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + 
context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_dont_remove_inuse_importfrom_relative(self) -> None: @@ -447,9 +446,7 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.c", "qux", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_dont_remove_wrong_importfrom_relative(self) -> None: @@ -476,9 +473,7 @@ class TestRemoveImportsCodemod(CodemodTest): before, after, [("a.b.d", "qux", None)], - context_override=CodemodContext( - full_module_name="a.b.foobar", full_package_name="a.b" - ), + context_override=CodemodContext(full_module_name="a.b.foobar"), ) def test_remove_import_complex(self) -> None: @@ -756,6 +751,7 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveBarTransformer(VisitorBasedCodemodCommand): + METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) @m.leave( @@ -819,6 +815,7 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): + METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_ImportFrom(self, node: cst.ImportFrom) -> None: @@ -857,6 +854,7 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): + METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_Import(self, node: cst.Import) -> None: @@ -888,6 +886,7 @@ class TestRemoveImportsCodemod(CodemodTest): """ class RemoveImportTransformer(VisitorBasedCodemodCommand): + METADATA_DEPENDENCIES = (QualifiedNameProvider, ScopeProvider) def visit_ImportFrom(self, node: cst.ImportFrom) -> None: diff --git a/libcst/display/__init__.py b/libcst/display/__init__.py deleted file mode 100644 index 49365a58..00000000 --- a/libcst/display/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from libcst.display.graphviz import dump_graphviz -from libcst.display.text import dump - -__all__ = [ - "dump", - "dump_graphviz", -] diff --git a/libcst/display/graphviz.py b/libcst/display/graphviz.py deleted file mode 100644 index e6b5b748..00000000 --- a/libcst/display/graphviz.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
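# Illustrative sketch of the dump_graphviz helper removed below (assuming the
# pre-removal libcst.display API):
#
#     import libcst
#     from libcst.display import dump_graphviz
#
#     module = libcst.parse_module("x = 1\n")
#     dot_source = dump_graphviz(module)   # a graphviz "digraph { ... }" string
#     # render with the graphviz toolchain, e.g. `dot -Tpng cst.dot -o cst.png`;
#     # pass show_whitespace/show_syntax/show_defaults for a more detailed tree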
- -from __future__ import annotations - -import textwrap -from collections.abc import Sequence - -from libcst import CSTNode -from libcst.helpers import filter_node_fields - - -_syntax_style = ', color="#777777", fillcolor="#eeeeee"' -_value_style = ', color="#3e99ed", fillcolor="#b8d9f8"' - -node_style: dict[str, str] = { - "__default__": "", - "EmptyLine": _syntax_style, - "IndentedBlock": _syntax_style, - "SimpleStatementLine": _syntax_style, - "SimpleWhitespace": _syntax_style, - "TrailingWhitespace": _syntax_style, - "Newline": _syntax_style, - "Comma": _syntax_style, - "LeftParen": _syntax_style, - "RightParen": _syntax_style, - "LeftSquareBracket": _syntax_style, - "RightSquareBracket": _syntax_style, - "LeftCurlyBrace": _syntax_style, - "RightCurlyBrace": _syntax_style, - "BaseSmallStatement": _syntax_style, - "BaseCompoundStatement": _syntax_style, - "SimpleStatementSuite": _syntax_style, - "Colon": _syntax_style, - "Dot": _syntax_style, - "Semicolon": _syntax_style, - "ParenthesizedWhitespace": _syntax_style, - "BaseParenthesizableWhitespace": _syntax_style, - "Comment": _syntax_style, - "Name": _value_style, - "Integer": _value_style, - "Float": _value_style, - "Imaginary": _value_style, - "SimpleString": _value_style, - "FormattedStringText": _value_style, -} -"""Graphviz style for specific CST nodes""" - - -def _create_node_graphviz(node: CSTNode) -> str: - """Creates the graphviz representation of a CST node.""" - node_name = node.__class__.__qualname__ - - if node_name in node_style: - style = node_style[node_name] - else: - style = node_style["__default__"] - - # pyre-ignore[16]: the existence of node.value is checked before usage - if hasattr(node, "value") and isinstance(node.value, str): - line_break = r"\n" - quote = '"' - escaped_quote = r"\"" - value = f"{line_break}<{node.value.replace(quote, escaped_quote)}>" - style = style + ', shape="box"' - else: - value = "" - - return f'{id(node)} [label="{node_name}{value}"{style}]' - - -def _node_repr_recursive( - node: object, - *, - show_defaults: bool, - show_syntax: bool, - show_whitespace: bool, -) -> list[str]: - """Creates the graphviz representation of a CST node, - and of its child nodes.""" - if not isinstance(node, CSTNode): - return [] - - fields = filter_node_fields( - node, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - - graphviz_lines: list[str] = [_create_node_graphviz(node)] - - for field in fields: - value = getattr(node, field.name) - if isinstance(value, CSTNode): - # Display a single node - graphviz_lines.append(f'{id(node)} -> {id(value)} [label="{field.name}"]') - graphviz_lines.extend( - _node_repr_recursive( - value, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - ) - continue - - if isinstance(value, Sequence): - # Display a sequence of nodes - for index, child in enumerate(value): - if isinstance(child, CSTNode): - graphviz_lines.append( - rf'{id(node)} -> {id(child)} [label="{field.name}[{index}]"]' - ) - graphviz_lines.extend( - _node_repr_recursive( - child, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - ) - - return graphviz_lines - - -def dump_graphviz( - node: object, - *, - show_defaults: bool = False, - show_syntax: bool = False, - show_whitespace: bool = False, -) -> str: - """ - Returns a string representation (in graphviz .dot style) of a CST node, - and its child nodes. 
- - Setting ``show_defaults`` to ``True`` will add fields regardless if their - value is different from the default value. - - Setting ``show_whitespace`` will add whitespace fields and setting - ``show_syntax`` will add syntax fields while respecting the value of - ``show_defaults``. - """ - - graphviz_settings = textwrap.dedent( - r""" - layout=dot; - rankdir=TB; - splines=line; - ranksep=0.5; - nodesep=1.0; - dpi=300; - bgcolor=transparent; - node [ - style=filled, - color="#fb8d3f", - fontcolor="#4b4f54", - fillcolor="#fdd2b3", - fontname="Source Code Pro Semibold", - penwidth="2", - group=main, - ]; - edge [ - color="#999999", - fontcolor="#4b4f54", - fontname="Source Code Pro Semibold", - fontsize=12, - penwidth=2, - ]; - """[ - 1: - ] - ) - - return "\n".join( - ["digraph {", graphviz_settings] - + _node_repr_recursive( - node, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - + ["}"] - ) diff --git a/libcst/display/tests/__init__.py b/libcst/display/tests/__init__.py deleted file mode 100644 index 7bec24cb..00000000 --- a/libcst/display/tests/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. diff --git a/libcst/display/tests/test_dump_graphviz.py b/libcst/display/tests/test_dump_graphviz.py deleted file mode 100644 index 17ce231f..00000000 --- a/libcst/display/tests/test_dump_graphviz.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from __future__ import annotations - -from textwrap import dedent -from typing import TYPE_CHECKING - -from libcst import parse_module -from libcst.display import dump_graphviz -from libcst.testing.utils import UnitTest - -if TYPE_CHECKING: - from libcst import Module - - -class CSTDumpGraphvizTest(UnitTest): - """Check dump_graphviz contains CST nodes.""" - - source_code: str = dedent( - r""" - def foo(a: str) -> None: - pass ; - pass - return - """[ - 1: - ] - ) - cst: Module - - @classmethod - def setUpClass(cls) -> None: - cls.cst = parse_module(cls.source_code) - - def _assert_node(self, node_name: str, graphviz_str: str) -> None: - self.assertIn( - node_name, graphviz_str, f"No node {node_name} found in graphviz_dump" - ) - - def _check_essential_nodes_in_tree(self, graphviz_str: str) -> None: - # Check CST nodes are present in graphviz string - self._assert_node("Module", graphviz_str) - self._assert_node("FunctionDef", graphviz_str) - self._assert_node("Name", graphviz_str) - self._assert_node("Parameters", graphviz_str) - self._assert_node("Param", graphviz_str) - self._assert_node("Annotation", graphviz_str) - self._assert_node("IndentedBlock", graphviz_str) - self._assert_node("SimpleStatementLine", graphviz_str) - self._assert_node("Pass", graphviz_str) - self._assert_node("Return", graphviz_str) - - # Check CST values are present in graphviz string - self._assert_node("", graphviz_str) - self._assert_node("", graphviz_str) - self._assert_node("", graphviz_str) - self._assert_node("", graphviz_str) - - def test_essential_tree(self) -> None: - """Check essential nodes are present in the CST graphviz dump.""" - graphviz_str = dump_graphviz(self.cst) - self._check_essential_nodes_in_tree(graphviz_str) - - def test_full_tree(self) -> None: - """Check all 
nodes are present in the CST graphviz dump.""" - graphviz_str = dump_graphviz( - self.cst, - show_whitespace=True, - show_defaults=True, - show_syntax=True, - ) - self._check_essential_nodes_in_tree(graphviz_str) - - self._assert_node("Semicolon", graphviz_str) - self._assert_node("SimpleWhitespace", graphviz_str) - self._assert_node("Newline", graphviz_str) - self._assert_node("TrailingWhitespace", graphviz_str) - - self._assert_node("<>", graphviz_str) - self._assert_node("< >", graphviz_str) diff --git a/libcst/display/text.py b/libcst/display/text.py deleted file mode 100644 index 0e270009..00000000 --- a/libcst/display/text.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from __future__ import annotations - -import dataclasses -from typing import List, Sequence - -from libcst import CSTLogicError, CSTNode -from libcst.helpers import filter_node_fields - -_DEFAULT_INDENT: str = " " - - -def _node_repr_recursive( # noqa: C901 - node: object, - *, - indent: str = _DEFAULT_INDENT, - show_defaults: bool = False, - show_syntax: bool = False, - show_whitespace: bool = False, -) -> List[str]: - if isinstance(node, CSTNode): - # This is a CSTNode, we must pretty-print it. - fields: Sequence[dataclasses.Field[CSTNode]] = filter_node_fields( - node=node, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - - tokens: List[str] = [node.__class__.__name__] - - if len(fields) == 0: - tokens.append("()") - else: - tokens.append("(\n") - - for field in fields: - child_tokens: List[str] = [field.name, "="] - value = getattr(node, field.name) - - if isinstance(value, (str, bytes)) or not isinstance(value, Sequence): - # Render out the node contents - child_tokens.extend( - _node_repr_recursive( - value, - indent=indent, - show_whitespace=show_whitespace, - show_defaults=show_defaults, - show_syntax=show_syntax, - ) - ) - elif isinstance(value, Sequence): - # Render out a list of individual nodes - if len(value) > 0: - child_tokens.append("[\n") - list_tokens: List[str] = [] - - last_value = len(value) - 1 - for j, v in enumerate(value): - list_tokens.extend( - _node_repr_recursive( - v, - indent=indent, - show_whitespace=show_whitespace, - show_defaults=show_defaults, - show_syntax=show_syntax, - ) - ) - if j != last_value: - list_tokens.append(",\n") - else: - list_tokens.append(",") - - split_by_line = "".join(list_tokens).split("\n") - child_tokens.append( - "\n".join(f"{indent}{t}" for t in split_by_line) - ) - - child_tokens.append("\n]") - else: - child_tokens.append("[]") - else: - raise CSTLogicError("Logic error!") - - # Handle indentation and trailing comma. - split_by_line = "".join(child_tokens).split("\n") - tokens.append("\n".join(f"{indent}{t}" for t in split_by_line)) - tokens.append(",\n") - - tokens.append(")") - - return tokens - else: - # This is a python value, just return the repr - return [repr(node)] - - -def dump( - node: CSTNode, - *, - indent: str = _DEFAULT_INDENT, - show_defaults: bool = False, - show_syntax: bool = False, - show_whitespace: bool = False, -) -> str: - """ - Returns a string representation of the node that contains minimal differences - from the default contruction of the node while also hiding whitespace and - syntax fields. 
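# Illustrative sketch of the dump helper documented above (assuming the
# pre-removal libcst.display API):
#
#     import libcst
#     from libcst.display import dump
#
#     print(dump(libcst.parse_expression("x")))
#     # prints a trimmed repr, roughly Name(value='x'), with whitespace and
#     # syntax fields hidden unless the corresponding flags are passed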
- - Setting ``show_defaults`` to ``True`` will add fields regardless if their - value is different from the default value. - - Setting ``show_whitespace`` will add whitespace fields and setting - ``show_syntax`` will add syntax fields while respecting the value of - ``show_defaults``. - - When all keyword args are set to true, the output of this function is - indentical to the __repr__ method of the node. - """ - return "".join( - _node_repr_recursive( - node, - indent=indent, - show_defaults=show_defaults, - show_syntax=show_syntax, - show_whitespace=show_whitespace, - ) - ) diff --git a/libcst/helpers/__init__.py b/libcst/helpers/__init__.py index 817acc39..ccd12c72 100644 --- a/libcst/helpers/__init__.py +++ b/libcst/helpers/__init__.py @@ -4,6 +4,11 @@ # LICENSE file in the root directory of this source tree. # +from libcst.helpers._statement import ( + get_absolute_module, + get_absolute_module_for_import, + get_absolute_module_for_import_or_raise, +) from libcst.helpers._template import ( parse_template_expression, parse_template_module, @@ -14,34 +19,12 @@ from libcst.helpers.expression import ( get_full_name_for_node, get_full_name_for_node_or_raise, ) -from libcst.helpers.module import ( - calculate_module_and_package, - get_absolute_module, - get_absolute_module_for_import, - get_absolute_module_for_import_or_raise, - get_absolute_module_from_package, - get_absolute_module_from_package_for_import, - get_absolute_module_from_package_for_import_or_raise, - insert_header_comments, - ModuleNameAndPackage, -) -from libcst.helpers.node_fields import ( - filter_node_fields, - get_field_default_value, - get_node_fields, - is_default_node_field, - is_syntax_node_field, - is_whitespace_node_field, -) +from libcst.helpers.module import insert_header_comments __all__ = [ - "calculate_module_and_package", "get_absolute_module", "get_absolute_module_for_import", "get_absolute_module_for_import_or_raise", - "get_absolute_module_from_package", - "get_absolute_module_from_package_for_import", - "get_absolute_module_from_package_for_import_or_raise", "get_full_name_for_node", "get_full_name_for_node_or_raise", "ensure_type", @@ -49,11 +32,4 @@ __all__ = [ "parse_template_module", "parse_template_statement", "parse_template_expression", - "ModuleNameAndPackage", - "get_node_fields", - "get_field_default_value", - "is_whitespace_node_field", - "is_syntax_node_field", - "is_default_node_field", - "filter_node_fields", ] diff --git a/libcst/helpers/_statement.py b/libcst/helpers/_statement.py new file mode 100644 index 00000000..f62a5eb8 --- /dev/null +++ b/libcst/helpers/_statement.py @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from typing import Optional + +import libcst as cst +from libcst.helpers.expression import get_full_name_for_node + + +def get_absolute_module( + current_module: Optional[str], module_name: Optional[str], num_dots: int +) -> Optional[str]: + if num_dots == 0: + # This is an absolute import, so the module is correct. + return module_name + if current_module is None: + # We don't actually have the current module available, so we can't compute + # the absolute module from relative. + return None + # We have the current module, as well as the relative, let's compute the base. 
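# Illustrative resolution examples (these mirror the cases exercised by
# test_statement.py later in this patch):
#   get_absolute_module("x.y.z", "w", 1)  -> "x.y.w"   (from .w import c)
#   get_absolute_module("x.y.z", "w", 2)  -> "x.w"     (from ..w import c)
#   get_absolute_module("x.y.z", None, 1) -> "x.y"     (from . import c)
#   get_absolute_module("x.y.z", "w", 5)  -> None      (walks past the top-level package)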
+ modules = current_module.split(".") + if len(modules) < num_dots: + # This relative import goes past the base of the repository, so we can't calculate it. + return None + base_module = ".".join(modules[:-num_dots]) + # Finally, if the module name was supplied, append it to the end. + if module_name is not None: + # If we went all the way to the top, the base module should be empty, so we + # should return the relative bit as absolute. Otherwise, combine the base + # module and module name using a dot separator. + base_module = ( + f"{base_module}.{module_name}" if len(base_module) > 0 else module_name + ) + # If they tried to import all the way to the root, return None. Otherwise, + # return the module itself. + return base_module if len(base_module) > 0 else None + + +def get_absolute_module_for_import( + current_module: Optional[str], import_node: cst.ImportFrom +) -> Optional[str]: + # First, let's try to grab the module name, regardless of relative status. + module = import_node.module + module_name = get_full_name_for_node(module) if module is not None else None + # Now, get the relative import location if it exists. + num_dots = len(import_node.relative) + return get_absolute_module(current_module, module_name, num_dots) + + +def get_absolute_module_for_import_or_raise( + current_module: Optional[str], import_node: cst.ImportFrom +) -> str: + module = get_absolute_module_for_import(current_module, import_node) + if module is None: + raise Exception(f"Unable to compute absolute module for {import_node}") + return module diff --git a/libcst/helpers/_template.py b/libcst/helpers/_template.py index e205e0af..e3f915a5 100644 --- a/libcst/helpers/_template.py +++ b/libcst/helpers/_template.py @@ -45,12 +45,12 @@ def unmangled_name(var: str) -> Optional[str]: def mangle_template(template: str, template_vars: Set[str]) -> str: if TEMPLATE_PREFIX in template or TEMPLATE_SUFFIX in template: - raise ValueError("Cannot parse a template containing reserved strings") + raise Exception("Cannot parse a template containing reserved strings") for var in template_vars: original = f"{{{var}}}" if original not in template: - raise ValueError( + raise Exception( f'Template string is missing a reference to "{var}" referred to in kwargs' ) template = template.replace(original, mangled_name(var)) @@ -142,7 +142,7 @@ class TemplateTransformer(cst.CSTTransformer): name for name in template_replacements if name not in supported_vars } if unsupported_vars: - raise ValueError( + raise Exception( f'Template replacement for "{next(iter(unsupported_vars))}" is unsupported' ) @@ -350,7 +350,7 @@ class TemplateChecker(cst.CSTVisitor): def visit_Name(self, node: cst.Name) -> None: for var in self.template_vars: if node.value == mangled_name(var): - raise ValueError(f'Template variable "{var}" was not replaced properly') + raise Exception(f'Template variable "{var}" was not replaced properly') def unmangle_nodes( @@ -424,8 +424,8 @@ def parse_template_statement( if not isinstance( new_statement, (cst.SimpleStatementLine, cst.BaseCompoundStatement) ): - raise TypeError( - f"Expected a statement but got a {new_statement.__class__.__qualname__}!" + raise Exception( + f"Expected a statement but got a {new_statement.__class__.__name__}!" 
) new_statement.visit(TemplateChecker({name for name in template_replacements})) return new_statement diff --git a/libcst/helpers/common.py b/libcst/helpers/common.py index dee73aa4..0965abeb 100644 --- a/libcst/helpers/common.py +++ b/libcst/helpers/common.py @@ -3,12 +3,12 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -from typing import Type, TypeVar +from typing import Type -T = TypeVar("T") +from libcst._types import CSTNodeT -def ensure_type(node: object, nodetype: Type[T]) -> T: +def ensure_type(node: object, nodetype: Type[CSTNodeT]) -> CSTNodeT: """ Takes any python object, and a LibCST :class:`~libcst.CSTNode` subclass and refines the type of the python object. This is most useful when you already @@ -19,7 +19,7 @@ def ensure_type(node: object, nodetype: Type[T]) -> T: """ if not isinstance(node, nodetype): - raise ValueError( - f"Expected a {nodetype.__name__} but got a {node.__class__.__qualname__}!" + raise Exception( + f"Expected a {nodetype.__name__} but got a {node.__class__.__name__}!" ) return node diff --git a/libcst/helpers/expression.py b/libcst/helpers/expression.py index 5ae016cf..beb5f324 100644 --- a/libcst/helpers/expression.py +++ b/libcst/helpers/expression.py @@ -38,5 +38,5 @@ def get_full_name_for_node_or_raise(node: Union[str, cst.CSTNode]) -> str: """ full_name = get_full_name_for_node(node) if full_name is None: - raise ValueError(f"Not able to parse full name for: {node}") + raise Exception(f"Not able to parse full name for: {node}") return full_name diff --git a/libcst/helpers/matchers.py b/libcst/helpers/matchers.py deleted file mode 100644 index e641c43c..00000000 --- a/libcst/helpers/matchers.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. -# - -from dataclasses import fields, is_dataclass, MISSING - -from libcst import matchers -from libcst._nodes.base import CSTNode - - -def node_to_matcher( - node: CSTNode, *, match_syntactic_trivia: bool = False -) -> matchers.BaseMatcherNode: - """Convert a concrete node to a matcher.""" - if not is_dataclass(node): - raise ValueError(f"{node} is not a CSTNode") - - attrs = {} - for field in fields(node): - name = field.name - child = getattr(node, name) - if not match_syntactic_trivia and field.name.startswith("whitespace"): - # Not all nodes have whitespace fields, some have multiple, but they all - # start with whitespace* - child = matchers.DoNotCare() - elif field.default is not MISSING and child == field.default: - child = matchers.DoNotCare() - # pyre-ignore[29]: Union[MISSING_TYPE, ...] is not a function. 
- elif field.default_factory is not MISSING and child == field.default_factory(): - child = matchers.DoNotCare() - elif isinstance(child, (list, tuple)): - child = type(child)( - node_to_matcher(item, match_syntactic_trivia=match_syntactic_trivia) - for item in child - ) - elif hasattr(matchers, type(child).__name__): - child = node_to_matcher( - child, match_syntactic_trivia=match_syntactic_trivia - ) - attrs[name] = child - - matcher = getattr(matchers, type(node).__name__) - return matcher(**attrs) diff --git a/libcst/helpers/module.py b/libcst/helpers/module.py index 2b2973bf..50e42ff7 100644 --- a/libcst/helpers/module.py +++ b/libcst/helpers/module.py @@ -3,17 +3,13 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -from dataclasses import dataclass from itertools import islice -from pathlib import Path, PurePath -from typing import List, Optional +from typing import List -from libcst import Comment, EmptyLine, ImportFrom, Module -from libcst._types import StrPath -from libcst.helpers.expression import get_full_name_for_node +import libcst -def insert_header_comments(node: Module, comments: List[str]) -> Module: +def insert_header_comments(node: libcst.Module, comments: List[str]) -> libcst.Module: """ Insert comments after last non-empty line in header. Use this to insert one or more comments after any copyright preamble in a :class:`~libcst.Module`. Each comment in @@ -29,136 +25,9 @@ def insert_header_comments(node: Module, comments: List[str]) -> Module: comment_lines = islice(node.header, last_comment_index + 1) empty_lines = islice(node.header, last_comment_index + 1, None) - inserted_lines = [EmptyLine(comment=Comment(value=comment)) for comment in comments] + inserted_lines = [ + libcst.EmptyLine(comment=libcst.Comment(value=comment)) for comment in comments + ] # pyre-fixme[60]: Concatenation not yet support for multiple variadic tuples: # `*comment_lines, *inserted_lines, *empty_lines`. return node.with_changes(header=(*comment_lines, *inserted_lines, *empty_lines)) - - -def get_absolute_module( - current_module: Optional[str], module_name: Optional[str], num_dots: int -) -> Optional[str]: - if num_dots == 0: - # This is an absolute import, so the module is correct. - return module_name - if current_module is None: - # We don't actually have the current module available, so we can't compute - # the absolute module from relative. - return None - # We have the current module, as well as the relative, let's compute the base. - modules = current_module.split(".") - if len(modules) < num_dots: - # This relative import goes past the base of the repository, so we can't calculate it. - return None - base_module = ".".join(modules[:-num_dots]) - # Finally, if the module name was supplied, append it to the end. - if module_name is not None: - # If we went all the way to the top, the base module should be empty, so we - # should return the relative bit as absolute. Otherwise, combine the base - # module and module name using a dot separator. - base_module = ( - f"{base_module}.{module_name}" if len(base_module) > 0 else module_name - ) - # If they tried to import all the way to the root, return None. Otherwise, - # return the module itself. - return base_module if len(base_module) > 0 else None - - -def get_absolute_module_for_import( - current_module: Optional[str], import_node: ImportFrom -) -> Optional[str]: - # First, let's try to grab the module name, regardless of relative status. 
- module = import_node.module - module_name = get_full_name_for_node(module) if module is not None else None - # Now, get the relative import location if it exists. - num_dots = len(import_node.relative) - return get_absolute_module(current_module, module_name, num_dots) - - -def get_absolute_module_for_import_or_raise( - current_module: Optional[str], import_node: ImportFrom -) -> str: - module = get_absolute_module_for_import(current_module, import_node) - if module is None: - raise ValueError(f"Unable to compute absolute module for {import_node}") - return module - - -def get_absolute_module_from_package( - current_package: Optional[str], module_name: Optional[str], num_dots: int -) -> Optional[str]: - if num_dots == 0: - # This is an absolute import, so the module is correct. - return module_name - if current_package is None or current_package == "": - # We don't actually have the current module available, so we can't compute - # the absolute module from relative. - return None - - # see importlib._bootstrap._resolve_name - # https://github.com/python/cpython/blob/3.10/Lib/importlib/_bootstrap.py#L902 - bits = current_package.rsplit(".", num_dots - 1) - if len(bits) < num_dots: - return None - - base = bits[0] - return "{}.{}".format(base, module_name) if module_name else base - - -def get_absolute_module_from_package_for_import( - current_package: Optional[str], import_node: ImportFrom -) -> Optional[str]: - # First, let's try to grab the module name, regardless of relative status. - module = import_node.module - module_name = get_full_name_for_node(module) if module is not None else None - # Now, get the relative import location if it exists. - num_dots = len(import_node.relative) - return get_absolute_module_from_package(current_package, module_name, num_dots) - - -def get_absolute_module_from_package_for_import_or_raise( - current_package: Optional[str], import_node: ImportFrom -) -> str: - module = get_absolute_module_from_package_for_import(current_package, import_node) - if module is None: - raise ValueError(f"Unable to compute absolute module for {import_node}") - return module - - -@dataclass(frozen=True) -class ModuleNameAndPackage: - name: str - package: str - - -def calculate_module_and_package( - repo_root: StrPath, filename: StrPath, use_pyproject_toml: bool = False -) -> ModuleNameAndPackage: - # Given an absolute repo_root and an absolute filename, calculate the - # python module name for the file. - if use_pyproject_toml: - # But also look for pyproject.toml files, indicating nested packages in the repo. 
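# Illustrative behaviour of calculate_module_and_package (mirroring the tests
# removed later in this patch):
#   calculate_module_and_package("/home/username/root", "/home/username/root/some/dir/file.py")
#       -> ModuleNameAndPackage(name="some.dir.file", package="some.dir")
#   calculate_module_and_package("/home/username/root", "/home/username/root/some/dir/__init__.py")
#       -> ModuleNameAndPackage(name="some.dir", package="some.dir")
#   With use_pyproject_toml=True, the nearest directory containing a
#   pyproject.toml (rather than repo_root itself) is treated as the package root.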
- abs_repo_root = Path(repo_root).resolve() - abs_filename = Path(filename).resolve() - package_root = abs_filename.parent - while package_root != abs_repo_root: - if (package_root / "pyproject.toml").exists(): - break - if package_root == package_root.parent: - break - package_root = package_root.parent - - relative_filename = abs_filename.relative_to(package_root) - else: - relative_filename = PurePath(filename).relative_to(repo_root) - relative_filename = relative_filename.with_suffix("") - - # handle special cases - if relative_filename.stem in ["__init__", "__main__"]: - relative_filename = relative_filename.parent - package = name = ".".join(relative_filename.parts) - else: - name = ".".join(relative_filename.parts) - package = ".".join(relative_filename.parts[:-1]) - - return ModuleNameAndPackage(name, package) diff --git a/libcst/helpers/node_fields.py b/libcst/helpers/node_fields.py deleted file mode 100644 index 418d6cbb..00000000 --- a/libcst/helpers/node_fields.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from __future__ import annotations - -import dataclasses -from typing import TYPE_CHECKING - -from libcst import IndentedBlock, Module -from libcst._nodes.deep_equals import deep_equals - -if TYPE_CHECKING: - from typing import Sequence - - from libcst import CSTNode - - -def get_node_fields(node: CSTNode) -> Sequence[dataclasses.Field[CSTNode]]: - """ - Returns the sequence of a given CST-node's fields. - """ - return dataclasses.fields(node) - - -def is_whitespace_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: - """ - Returns True if a given CST-node's field is a whitespace-related field - (whitespace, indent, header, footer, etc.). - """ - if "whitespace" in field.name: - return True - if "leading_lines" in field.name: - return True - if "lines_after_decorators" in field.name: - return True - if isinstance(node, (IndentedBlock, Module)) and field.name in [ - "header", - "footer", - ]: - return True - if isinstance(node, IndentedBlock) and field.name == "indent": - return True - return False - - -def is_syntax_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: - """ - Returns True if a given CST-node's field is a syntax-related field - (colon, semicolon, dot, encoding, etc.). - """ - if isinstance(node, Module) and field.name in [ - "encoding", - "default_indent", - "default_newline", - "has_trailing_newline", - ]: - return True - type_str = repr(field.type) - if ( - "Sentinel" in type_str - and field.name not in ["star_arg", "star", "posonly_ind"] - and "whitespace" not in field.name - ): - # This is a value that can optionally be specified, so its - # definitely syntax. - return True - - for name in ["Semicolon", "Colon", "Comma", "Dot", "AssignEqual"]: - # These are all nodes that exist for separation syntax - if name in type_str: - return True - - return False - - -def get_field_default_value(field: dataclasses.Field[CSTNode]) -> object: - """ - Returns the default value of a CST-node's field. - """ - if field.default_factory is not dataclasses.MISSING: - # pyre-fixme[29]: `Union[dataclasses._MISSING_TYPE, - # dataclasses._DefaultFactory[object]]` is not a function. 
- return field.default_factory() - return field.default - - -def is_default_node_field(node: CSTNode, field: dataclasses.Field[CSTNode]) -> bool: - """ - Returns True if a given CST-node's field has its default value. - """ - return deep_equals(getattr(node, field.name), get_field_default_value(field)) - - -def filter_node_fields( - node: CSTNode, - *, - show_defaults: bool, - show_syntax: bool, - show_whitespace: bool, -) -> Sequence[dataclasses.Field[CSTNode]]: - """ - Returns a filtered sequence of a CST-node's fields. - - Setting ``show_whitespace`` to ``False`` will filter whitespace fields. - - Setting ``show_defaults`` to ``False`` will filter fields if their value is equal to - the default value ; while respecting the value of ``show_whitespace``. - - Setting ``show_syntax`` to ``False`` will filter syntax fields ; while respecting - the value of ``show_whitespace`` & ``show_defaults``. - """ - - fields: Sequence[dataclasses.Field[CSTNode]] = dataclasses.fields(node) - # Hide all fields prefixed with "_" - fields = [f for f in fields if f.name[0] != "_"] - # Filter whitespace nodes if needed - if not show_whitespace: - fields = [f for f in fields if not is_whitespace_node_field(node, f)] - # Filter values which aren't changed from their defaults - if not show_defaults: - fields = [f for f in fields if not is_default_node_field(node, f)] - # Filter out values which aren't interesting if needed - if not show_syntax: - fields = [f for f in fields if not is_syntax_node_field(node, f)] - - return fields diff --git a/libcst/helpers/paths.py b/libcst/helpers/paths.py deleted file mode 100644 index 00830ce9..00000000 --- a/libcst/helpers/paths.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import os -from contextlib import contextmanager -from pathlib import Path -from typing import Generator - -from libcst._types import StrPath - - -@contextmanager -def chdir(path: StrPath) -> Generator[Path, None, None]: - """ - Temporarily chdir to the given path, and then return to the previous path. - """ - try: - path = Path(path).resolve() - cwd = os.getcwd() - os.chdir(path) - yield path - finally: - os.chdir(cwd) diff --git a/libcst/helpers/tests/test_matchers.py b/libcst/helpers/tests/test_matchers.py deleted file mode 100644 index 3f2b9b47..00000000 --- a/libcst/helpers/tests/test_matchers.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
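# Illustrative sketch of node_to_matcher, exercised by the tests deleted in
# this file (assuming the pre-removal libcst.helpers.matchers API):
#
#     from libcst import parse_expression
#     from libcst.helpers.matchers import node_to_matcher
#     from libcst.matchers import matches
#
#     matcher = node_to_matcher(parse_expression("(foo , bar )"))
#     matches(parse_expression("(foo,bar)"), matcher)
#     # -> True: whitespace/trivia fields become DoNotCare() unless
#     #    match_syntactic_trivia=True is passed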
-# - -from libcst import parse_expression, parse_statement -from libcst.helpers.matchers import node_to_matcher -from libcst.matchers import matches -from libcst.testing.utils import data_provider, UnitTest - - -class MatchersTest(UnitTest): - @data_provider( - ( - ('"some string"',), - ("call(some, **kwargs)",), - ("a[b.c]",), - ("[1 for _ in range(99) if False]",), - ) - ) - def test_reflexive_expressions(self, code: str) -> None: - node = parse_expression(code) - matcher = node_to_matcher(node) - self.assertTrue(matches(node, matcher)) - - @data_provider( - ( - ("def foo(a) -> None: pass",), - ("class F: ...",), - ("foo: bar",), - ) - ) - def test_reflexive_statements(self, code: str) -> None: - node = parse_statement(code) - matcher = node_to_matcher(node) - self.assertTrue(matches(node, matcher)) - - def test_whitespace(self) -> None: - code_ws = parse_expression("(foo , bar )") - code = parse_expression("(foo,bar)") - self.assertTrue( - matches( - code, - node_to_matcher(code_ws), - ) - ) - self.assertFalse( - matches( - code, - node_to_matcher(code_ws, match_syntactic_trivia=True), - ) - ) diff --git a/libcst/helpers/tests/test_module.py b/libcst/helpers/tests/test_module.py index 815e1fa2..687e0260 100644 --- a/libcst/helpers/tests/test_module.py +++ b/libcst/helpers/tests/test_module.py @@ -3,22 +3,9 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -from pathlib import Path, PurePath -from typing import Any, Optional -from unittest.mock import patch - -import libcst as cst -from libcst.helpers.common import ensure_type -from libcst.helpers.module import ( - calculate_module_and_package, - get_absolute_module_for_import, - get_absolute_module_for_import_or_raise, - get_absolute_module_from_package_for_import, - get_absolute_module_from_package_for_import_or_raise, - insert_header_comments, - ModuleNameAndPackage, -) -from libcst.testing.utils import data_provider, UnitTest +import libcst +from libcst.helpers import insert_header_comments +from libcst.testing.utils import UnitTest class ModuleTest(UnitTest): @@ -31,7 +18,7 @@ class ModuleTest(UnitTest): expected_code = "\n".join( comment_lines + inserted_comments + empty_lines + non_header_line ) - node = cst.parse_module(original_code) + node = libcst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -39,7 +26,7 @@ class ModuleTest(UnitTest): # No comment case original_code = "\n".join(empty_lines + non_header_line) expected_code = "\n".join(inserted_comments + empty_lines + non_header_line) - node = cst.parse_module(original_code) + node = libcst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -47,7 +34,7 @@ class ModuleTest(UnitTest): # No empty lines case original_code = "\n".join(comment_lines + non_header_line) expected_code = "\n".join(comment_lines + inserted_comments + non_header_line) - node = cst.parse_module(original_code) + node = libcst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -58,7 +45,7 @@ class ModuleTest(UnitTest): expected_code = "\n".join( comment_lines + inserted_comments + empty_lines + non_header_line ) - node = cst.parse_module(original_code) + node = libcst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) @@ -66,285 +53,7 @@ class 
ModuleTest(UnitTest): # No header case original_code = "\n".join(non_header_line) expected_code = "\n".join(inserted_comments + non_header_line) - node = cst.parse_module(original_code) + node = libcst.parse_module(original_code) self.assertEqual( insert_header_comments(node, inserted_comments).code, expected_code ) - - @data_provider( - ( - # Simple imports that are already absolute. - (None, "from a.b import c", "a.b"), - ("x.y.z", "from a.b import c", "a.b"), - # Relative import that can't be resolved due to missing module. - (None, "from ..w import c", None), - # Relative import that goes past the module level. - ("x", "from ...y import z", None), - ("x.y.z", "from .....w import c", None), - ("x.y.z", "from ... import c", None), - # Correct resolution of absolute from relative modules. - ("x.y.z", "from . import c", "x.y"), - ("x.y.z", "from .. import c", "x"), - ("x.y.z", "from .w import c", "x.y.w"), - ("x.y.z", "from ..w import c", "x.w"), - ("x.y.z", "from ...w import c", "w"), - ) - ) - def test_get_absolute_module( - self, - module: Optional[str], - importfrom: str, - output: Optional[str], - ) -> None: - node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) - assert len(node.body) == 1, "Unexpected number of statements!" - import_node = ensure_type(node.body[0], cst.ImportFrom) - - self.assertEqual(get_absolute_module_for_import(module, import_node), output) - if output is None: - with self.assertRaises(Exception): - get_absolute_module_for_import_or_raise(module, import_node) - else: - self.assertEqual( - get_absolute_module_for_import_or_raise(module, import_node), output - ) - - @data_provider( - ( - # Simple imports that are already absolute. - (None, "from a.b import c", "a.b"), - ("x/y/z.py", "from a.b import c", "a.b"), - ("x/y/z/__init__.py", "from a.b import c", "a.b"), - # Relative import that can't be resolved due to missing module. - (None, "from ..w import c", None), - # Attempted relative import with no known parent package - ("__init__.py", "from .y import z", None), - ("x.py", "from .y import z", None), - # Relative import that goes past the module level. - ("x.py", "from ...y import z", None), - ("x/y/z.py", "from ... import c", None), - ("x/y/z.py", "from ...w import c", None), - ("x/y/z/__init__.py", "from .... import c", None), - ("x/y/z/__init__.py", "from ....w import c", None), - # Correct resolution of absolute from relative modules. - ("x/y/z.py", "from . import c", "x.y"), - ("x/y/z.py", "from .. import c", "x"), - ("x/y/z.py", "from .w import c", "x.y.w"), - ("x/y/z.py", "from ..w import c", "x.w"), - ("x/y/z/__init__.py", "from . import c", "x.y.z"), - ("x/y/z/__init__.py", "from .. import c", "x.y"), - ("x/y/z/__init__.py", "from ... import c", "x"), - ("x/y/z/__init__.py", "from .w import c", "x.y.z.w"), - ("x/y/z/__init__.py", "from ..w import c", "x.y.w"), - ("x/y/z/__init__.py", "from ...w import c", "x.w"), - ) - ) - def test_get_absolute_module_from_package( - self, - filename: Optional[str], - importfrom: str, - output: Optional[str], - ) -> None: - package = None - if filename is not None: - info = calculate_module_and_package(".", filename) - package = info.package - node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) - assert len(node.body) == 1, "Unexpected number of statements!" 
- import_node = ensure_type(node.body[0], cst.ImportFrom) - - self.assertEqual( - get_absolute_module_from_package_for_import(package, import_node), output - ) - if output is None: - with self.assertRaises(Exception): - get_absolute_module_from_package_for_import_or_raise( - package, import_node - ) - else: - self.assertEqual( - get_absolute_module_from_package_for_import_or_raise( - package, import_node - ), - output, - ) - - @data_provider( - ( - # Nodes without an asname - (cst.ImportAlias(name=cst.Name("foo")), "foo", None), - ( - cst.ImportAlias(name=cst.Attribute(cst.Name("foo"), cst.Name("bar"))), - "foo.bar", - None, - ), - # Nodes with an asname - ( - cst.ImportAlias( - name=cst.Name("foo"), asname=cst.AsName(name=cst.Name("baz")) - ), - "foo", - "baz", - ), - ( - cst.ImportAlias( - name=cst.Attribute(cst.Name("foo"), cst.Name("bar")), - asname=cst.AsName(name=cst.Name("baz")), - ), - "foo.bar", - "baz", - ), - ) - ) - def test_importalias_helpers( - self, alias_node: cst.ImportAlias, full_name: str, alias: Optional[str] - ) -> None: - self.assertEqual(alias_node.evaluated_name, full_name) - self.assertEqual(alias_node.evaluated_alias, alias) - - @data_provider( - ( - # Various files inside the root should give back valid modules. - ( - "/home/username/root", - "/home/username/root/file.py", - ModuleNameAndPackage("file", ""), - ), - ( - "/home/username/root/", - "/home/username/root/file.py", - ModuleNameAndPackage("file", ""), - ), - ( - "/home/username/root/", - "/home/username/root/some/dir/file.py", - ModuleNameAndPackage("some.dir.file", "some.dir"), - ), - # Various special files inside the root should give back valid modules. - ( - "/home/username/root/", - "/home/username/root/some/dir/__init__.py", - ModuleNameAndPackage("some.dir", "some.dir"), - ), - ( - "/home/username/root/", - "/home/username/root/some/dir/__main__.py", - ModuleNameAndPackage("some.dir", "some.dir"), - ), - ( - "c:/Program Files/", - "c:/Program Files/some/dir/file.py", - ModuleNameAndPackage("some.dir.file", "some.dir"), - ), - ( - "c:/Program Files/", - "c:/Program Files/some/dir/__main__.py", - ModuleNameAndPackage("some.dir", "some.dir"), - ), - ), - ) - def test_calculate_module_and_package( - self, - repo_root: str, - filename: str, - module_and_package: Optional[ModuleNameAndPackage], - ) -> None: - self.assertEqual( - calculate_module_and_package(repo_root, filename), module_and_package - ) - - @data_provider( - ( - ("foo/foo/__init__.py", ModuleNameAndPackage("foo", "foo")), - ("foo/foo/file.py", ModuleNameAndPackage("foo.file", "foo")), - ( - "foo/foo/sub/subfile.py", - ModuleNameAndPackage("foo.sub.subfile", "foo.sub"), - ), - ("libs/bar/bar/thing.py", ModuleNameAndPackage("bar.thing", "bar")), - ( - "noproj/some/file.py", - ModuleNameAndPackage("noproj.some.file", "noproj.some"), - ), - ) - ) - def test_calculate_module_and_package_using_pyproject_toml( - self, - rel_path: str, - module_and_package: Optional[ModuleNameAndPackage], - ) -> None: - mock_tree: dict[str, Any] = { - "home": { - "user": { - "root": { - "foo": { - "pyproject.toml": "content", - "foo": { - "__init__.py": "content", - "file.py": "content", - "sub": { - "subfile.py": "content", - }, - }, - }, - "libs": { - "bar": { - "pyproject.toml": "content", - "bar": { - "__init__.py": "content", - "thing.py": "content", - }, - } - }, - "noproj": { - "some": { - "file.py": "content", - } - }, - }, - }, - }, - } - repo_root = Path("/home/user/root").resolve() - fake_root: Path = repo_root.parent.parent.parent - - def 
mock_exists(path: PurePath) -> bool: - parts = path.relative_to(fake_root).parts - subtree = mock_tree - for part in parts: - if (subtree := subtree.get(part)) is None: - return False - return True - - with patch("pathlib.Path.exists", new=mock_exists): - self.assertEqual( - calculate_module_and_package( - repo_root, repo_root / rel_path, use_pyproject_toml=True - ), - module_and_package, - ) - - @data_provider( - ( - # Providing a file outside the root should raise an exception - ("/home/username/root", "/some/dummy/file.py"), - ("/home/username/root/", "/some/dummy/file.py"), - ("/home/username/root", "/home/username/file.py"), - # some windows tests - ( - "c:/Program Files/", - "d:/Program Files/some/dir/file.py", - ), - ( - "c:/Program Files/other/", - "c:/Program Files/some/dir/file.py", - ), - ) - ) - def test_invalid_module_and_package( - self, - repo_root: str, - filename: str, - ) -> None: - with self.assertRaises(ValueError): - calculate_module_and_package(repo_root, filename) diff --git a/libcst/helpers/tests/test_node_fields.py b/libcst/helpers/tests/test_node_fields.py deleted file mode 100644 index 61d5ec21..00000000 --- a/libcst/helpers/tests/test_node_fields.py +++ /dev/null @@ -1,314 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from unittest import TestCase - -from libcst import ( - Annotation, - CSTNode, - FunctionDef, - IndentedBlock, - Module, - Param, - parse_module, - Pass, - Semicolon, - SimpleStatementLine, -) - -from libcst.helpers import ( - get_node_fields, - is_default_node_field, - is_syntax_node_field, - is_whitespace_node_field, -) - - -class _NodeFieldsTest(TestCase): - """Node fields related tests.""" - - module: Module - annotation: Annotation - param: Param - _pass: Pass - semicolon: Semicolon - statement: SimpleStatementLine - indent: IndentedBlock - function: FunctionDef - - @classmethod - def setUpClass(cls) -> None: - """Parse a simple CST and references interesting nodes.""" - cls.module = parse_module( - "def foo(a: str) -> None:\n pass ; pass\n return\n" - ) - # /!\ Direct access to nodes - # This is done for test purposes on a known CST - # -> For "real code", use visitors to do this "the correct way" - - # pyre-ignore[8]: direct access for tests - cls.function = cls.module.body[0] - cls.param = cls.function.params.params[0] - # pyre-ignore[8]: direct access for tests - cls.annotation = cls.param.annotation - # pyre-ignore[8]: direct access for tests - cls.indent = cls.function.body - # pyre-ignore[8]: direct access for tests - cls.statement = cls.indent.body[0] - # pyre-ignore[8]: direct access for tests - cls._pass = cls.statement.body[0] - # pyre-ignore[8]: direct access for tests - cls.semicolon = cls.statement.body[0].semicolon - - def test__cst_correctness(self) -> None: - """Test that the CST is correctly parsed.""" - self.assertIsInstance(self.module, Module) - self.assertIsInstance(self.annotation, Annotation) - self.assertIsInstance(self.param, Param) - self.assertIsInstance(self._pass, Pass) - self.assertIsInstance(self.semicolon, Semicolon) - self.assertIsInstance(self.statement, SimpleStatementLine) - self.assertIsInstance(self.indent, IndentedBlock) - self.assertIsInstance(self.function, FunctionDef) - - -class IsWhitespaceNodeFieldTest(_NodeFieldsTest): - """``is_whitespace_node_field`` tests.""" - - def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: - 
fields = get_node_fields(node) - self.assertEqual(len(is_filtered_field), len(fields)) - for field in fields: - self.assertEqual( - is_filtered_field[field.name], - is_whitespace_node_field(node, field), - f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " - f"{'should have' if is_filtered_field[field.name] else 'should not have'} " - "been filtered by ``is_whitespace_node_field``", - ) - - def test_module(self) -> None: - """Check if a CST Module node is correctly filtered.""" - is_filtered_field = { - "body": False, - "header": True, - "footer": True, - "encoding": False, - "default_indent": False, - "default_newline": False, - "has_trailing_newline": False, - } - self._check_fields(is_filtered_field, self.module) - - def test_annotation(self) -> None: - """Check if a CST Annotation node is correctly filtered.""" - is_filtered_field = { - "annotation": False, - "whitespace_before_indicator": True, - "whitespace_after_indicator": True, - } - self._check_fields(is_filtered_field, self.annotation) - - def test_param(self) -> None: - """Check if a CST Param node is correctly filtered.""" - is_filtered_field = { - "name": False, - "annotation": False, - "equal": False, - "default": False, - "comma": False, - "star": False, - "whitespace_after_star": True, - "whitespace_after_param": True, - } - self._check_fields(is_filtered_field, self.param) - - def test_semicolon(self) -> None: - """Check if a CST Semicolon node is correctly filtered.""" - is_filtered_field = { - "whitespace_before": True, - "whitespace_after": True, - } - self._check_fields(is_filtered_field, self.semicolon) - - def test_statement(self) -> None: - """Check if a CST SimpleStatementLine node is correctly filtered.""" - is_filtered_field = { - "body": False, - "leading_lines": True, - "trailing_whitespace": True, - } - self._check_fields(is_filtered_field, self.statement) - - def test_indent(self) -> None: - """Check if a CST IndentedBlock node is correctly filtered.""" - is_filtered_field = { - "body": False, - "header": True, - "indent": True, - "footer": True, - } - self._check_fields(is_filtered_field, self.indent) - - def test_function(self) -> None: - """Check if a CST FunctionDef node is correctly filtered.""" - is_filtered_field = { - "name": False, - "params": False, - "body": False, - "decorators": False, - "returns": False, - "asynchronous": False, - "leading_lines": True, - "lines_after_decorators": True, - "whitespace_after_def": True, - "whitespace_after_name": True, - "whitespace_before_params": True, - "whitespace_before_colon": True, - "type_parameters": False, - "whitespace_after_type_parameters": True, - } - self._check_fields(is_filtered_field, self.function) - - -class IsSyntaxNodeFieldTest(_NodeFieldsTest): - """``is_syntax_node_field`` tests.""" - - def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: - fields = get_node_fields(node) - self.assertEqual(len(is_filtered_field), len(fields)) - for field in fields: - self.assertEqual( - is_filtered_field[field.name], - is_syntax_node_field(node, field), - f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " - f"{'should have' if is_filtered_field[field.name] else 'should not have'} " - "been filtered by ``is_syntax_node_field``", - ) - - def test_module(self) -> None: - """Check if a CST Module node is correctly filtered.""" - is_filtered_field = { - "body": False, - "header": False, - "footer": False, - "encoding": True, - "default_indent": True, - "default_newline": True, - "has_trailing_newline": 
True, - } - self._check_fields(is_filtered_field, self.module) - - def test_param(self) -> None: - """Check if a CST Param node is correctly filtered.""" - is_filtered_field = { - "name": False, - "annotation": False, - "equal": True, - "default": False, - "comma": True, - "star": False, - "whitespace_after_star": False, - "whitespace_after_param": False, - } - self._check_fields(is_filtered_field, self.param) - - def test_pass(self) -> None: - """Check if a CST Pass node is correctly filtered.""" - is_filtered_field = { - "semicolon": True, - } - self._check_fields(is_filtered_field, self._pass) - - -class IsDefaultNodeFieldTest(_NodeFieldsTest): - """``is_default_node_field`` tests.""" - - def _check_fields(self, is_filtered_field: dict[str, bool], node: CSTNode) -> None: - fields = get_node_fields(node) - self.assertEqual(len(is_filtered_field), len(fields)) - for field in fields: - self.assertEqual( - is_filtered_field[field.name], - is_default_node_field(node, field), - f"Node ``{node.__class__.__qualname__}`` field '{field.name}' " - f"{'should have' if is_filtered_field[field.name] else 'should not have'} " - "been filtered by ``is_default_node_field``", - ) - - def test_module(self) -> None: - """Check if a CST Module node is correctly filtered.""" - is_filtered_field = { - "body": False, - "header": True, - "footer": True, - "encoding": True, - "default_indent": True, - "default_newline": True, - "has_trailing_newline": True, - } - self._check_fields(is_filtered_field, self.module) - - def test_annotation(self) -> None: - """Check if a CST Annotation node is correctly filtered.""" - is_filtered_field = { - "annotation": False, - "whitespace_before_indicator": False, - "whitespace_after_indicator": True, - } - self._check_fields(is_filtered_field, self.annotation) - - def test_param(self) -> None: - """Check if a CST Param node is correctly filtered.""" - is_filtered_field = { - "name": False, - "annotation": False, - "equal": True, - "default": True, - "comma": True, - "star": False, - "whitespace_after_star": True, - "whitespace_after_param": True, - } - self._check_fields(is_filtered_field, self.param) - - def test_statement(self) -> None: - """Check if a CST SimpleStatementLine node is correctly filtered.""" - is_filtered_field = { - "body": False, - "leading_lines": True, - "trailing_whitespace": True, - } - self._check_fields(is_filtered_field, self.statement) - - def test_indent(self) -> None: - """Check if a CST IndentedBlock node is correctly filtered.""" - is_filtered_field = { - "body": False, - "header": True, - "indent": True, - "footer": True, - } - self._check_fields(is_filtered_field, self.indent) - - def test_function(self) -> None: - """Check if a CST FunctionDef node is correctly filtered.""" - is_filtered_field = { - "name": False, - "params": False, - "body": False, - "decorators": True, - "returns": False, - "asynchronous": True, - "leading_lines": True, - "lines_after_decorators": True, - "whitespace_after_def": True, - "whitespace_after_name": True, - "whitespace_before_params": True, - "whitespace_before_colon": True, - "type_parameters": True, - "whitespace_after_type_parameters": True, - } - self._check_fields(is_filtered_field, self.function) diff --git a/libcst/helpers/tests/test_paths.py b/libcst/helpers/tests/test_paths.py deleted file mode 100644 index c36273d4..00000000 --- a/libcst/helpers/tests/test_paths.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. 
-# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from pathlib import Path -from tempfile import TemporaryDirectory - -from libcst.helpers.paths import chdir -from libcst.testing.utils import UnitTest - - -class PathsTest(UnitTest): - def test_chdir(self) -> None: - with TemporaryDirectory() as td: - tdp = Path(td).resolve() - inner = tdp / "foo" / "bar" - inner.mkdir(parents=True) - - with self.subTest("string paths"): - cwd1 = Path.cwd() - - with chdir(tdp.as_posix()) as path2: - cwd2 = Path.cwd() - self.assertEqual(tdp, cwd2) - self.assertEqual(tdp, path2) - - with chdir(inner.as_posix()) as path3: - cwd3 = Path.cwd() - self.assertEqual(inner, cwd3) - self.assertEqual(inner, path3) - - cwd4 = Path.cwd() - self.assertEqual(tdp, cwd4) - self.assertEqual(cwd2, cwd4) - - cwd5 = Path.cwd() - self.assertEqual(cwd1, cwd5) - - with self.subTest("pathlib objects"): - cwd1 = Path.cwd() - - with chdir(tdp) as path2: - cwd2 = Path.cwd() - self.assertEqual(tdp, cwd2) - self.assertEqual(tdp, path2) - - with chdir(inner) as path3: - cwd3 = Path.cwd() - self.assertEqual(inner, cwd3) - self.assertEqual(inner, path3) - - cwd4 = Path.cwd() - self.assertEqual(tdp, cwd4) - self.assertEqual(cwd2, cwd4) - - cwd5 = Path.cwd() - self.assertEqual(cwd1, cwd5) diff --git a/libcst/helpers/tests/test_statement.py b/libcst/helpers/tests/test_statement.py new file mode 100644 index 00000000..f26900bd --- /dev/null +++ b/libcst/helpers/tests/test_statement.py @@ -0,0 +1,87 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. +# +from typing import Optional + +import libcst as cst +from libcst.helpers import ( + ensure_type, + get_absolute_module_for_import, + get_absolute_module_for_import_or_raise, +) +from libcst.testing.utils import data_provider, UnitTest + + +class StatementTest(UnitTest): + @data_provider( + ( + # Simple imports that are already absolute. + (None, "from a.b import c", "a.b"), + ("x.y.z", "from a.b import c", "a.b"), + # Relative import that can't be resolved due to missing module. + (None, "from ..w import c", None), + # Relative import that goes past the module level. + ("x", "from ...y import z", None), + ("x.y.z", "from .....w import c", None), + ("x.y.z", "from ... import c", None), + # Correct resolution of absolute from relative modules. + ("x.y.z", "from . import c", "x.y"), + ("x.y.z", "from .. import c", "x"), + ("x.y.z", "from .w import c", "x.y.w"), + ("x.y.z", "from ..w import c", "x.w"), + ("x.y.z", "from ...w import c", "w"), + ) + ) + def test_get_absolute_module( + self, + module: Optional[str], + importfrom: str, + output: Optional[str], + ) -> None: + node = ensure_type(cst.parse_statement(importfrom), cst.SimpleStatementLine) + assert len(node.body) == 1, "Unexpected number of statements!" 
+ import_node = ensure_type(node.body[0], cst.ImportFrom) + + self.assertEqual(get_absolute_module_for_import(module, import_node), output) + if output is None: + with self.assertRaises(Exception): + get_absolute_module_for_import_or_raise(module, import_node) + else: + self.assertEqual( + get_absolute_module_for_import_or_raise(module, import_node), output + ) + + @data_provider( + ( + # Nodes without an asname + (cst.ImportAlias(name=cst.Name("foo")), "foo", None), + ( + cst.ImportAlias(name=cst.Attribute(cst.Name("foo"), cst.Name("bar"))), + "foo.bar", + None, + ), + # Nodes with an asname + ( + cst.ImportAlias( + name=cst.Name("foo"), asname=cst.AsName(name=cst.Name("baz")) + ), + "foo", + "baz", + ), + ( + cst.ImportAlias( + name=cst.Attribute(cst.Name("foo"), cst.Name("bar")), + asname=cst.AsName(name=cst.Name("baz")), + ), + "foo.bar", + "baz", + ), + ) + ) + def test_importalias_helpers( + self, alias_node: cst.ImportAlias, full_name: str, alias: Optional[str] + ) -> None: + self.assertEqual(alias_node.evaluated_name, full_name) + self.assertEqual(alias_node.evaluated_alias, alias) diff --git a/libcst/matchers/__init__.py b/libcst/matchers/__init__.py index 2857fee1..9602de41 100644 --- a/libcst/matchers/__init__.py +++ b/libcst/matchers/__init__.py @@ -6,7 +6,9 @@ # This file was generated by libcst.codegen.gen_matcher_classes from dataclasses import dataclass -from typing import Literal, Optional, Sequence, Union +from typing import Optional, Sequence, Union + +from typing_extensions import Literal import libcst as cst from libcst.matchers._decorators import call_if_inside, call_if_not_inside, leave, visit @@ -142,10 +144,6 @@ class BaseSuite(_NodeABC): pass -class BaseTemplatedStringContent(_NodeABC): - pass - - class BaseUnaryOp(_NodeABC): pass @@ -2319,32 +2317,6 @@ class ClassDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -3117,6 +3089,13 @@ class Continue(BaseSmallStatement, BaseMatcherNode): ] = DoNotCare() +NameOrAttributeOrCallMatchType = Union[ + "Name", + "Attribute", + "Call", + MetadataMatchType, + MatchIfTrue[Union[cst.Name, cst.Attribute, cst.Call]], +] TrailingWhitespaceMatchType = Union[ "TrailingWhitespace", MetadataMatchType, MatchIfTrue[cst.TrailingWhitespace] ] @@ -3125,10 +3104,10 @@ TrailingWhitespaceMatchType = Union[ @dataclass(frozen=True, eq=False, unsafe_hash=False) class Decorator(BaseMatcherNode): decorator: Union[ - BaseExpressionMatchType, + NameOrAttributeOrCallMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[NameOrAttributeOrCallMatchType], + AllOf[NameOrAttributeOrCallMatchType], ] = DoNotCare() leading_lines: Union[ Sequence[ @@ -3544,16 +3523,16 @@ CompForMatchType = Union["CompFor", MetadataMatchType, MatchIfTrue[cst.CompFor]] @dataclass(frozen=True, 
eq=False, unsafe_hash=False) class DictComp(BaseComp, BaseDict, BaseExpression, BaseMatcherNode): key: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() value: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() for_in: Union[ CompForMatchType, @@ -5726,32 +5705,6 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): OneOf[SimpleWhitespaceMatchType], AllOf[SimpleWhitespaceMatchType], ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -5763,10 +5716,10 @@ class FunctionDef(BaseCompoundStatement, BaseStatement, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) class GeneratorExp(BaseComp, BaseExpression, BaseSimpleComp, BaseMatcherNode): elt: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() for_in: Union[ CompForMatchType, @@ -7411,46 +7364,6 @@ class Index(BaseSlice, BaseMatcherNode): OneOf[BaseExpressionMatchType], AllOf[BaseExpressionMatchType], ] = DoNotCare() - star: Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] - ], - AllOf[ - Union[ - Optional[Literal["*"]], - MetadataMatchType, - MatchIfTrue[Optional[Literal["*"]]], - ] - ], - ] = DoNotCare() - whitespace_after_star: Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - AllOf[ - Union[ - Optional["BaseParenthesizableWhitespace"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseParenthesizableWhitespace]], - ] - ], - ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -8294,10 +8207,10 @@ class List( @dataclass(frozen=True, eq=False, unsafe_hash=False) class ListComp(BaseComp, BaseExpression, BaseList, BaseSimpleComp, BaseMatcherNode): elt: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = 
DoNotCare() for_in: Union[ CompForMatchType, @@ -12002,31 +11915,6 @@ class ParamSlash(BaseMatcherNode): comma: Union[ CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] ] = DoNotCare() - whitespace_after: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class ParamSpec(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() metadata: Union[ MetadataMatchType, DoNotCareSentinel, @@ -12994,10 +12882,10 @@ class Set(BaseExpression, BaseSet, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) class SetComp(BaseComp, BaseExpression, BaseSet, BaseSimpleComp, BaseMatcherNode): elt: Union[ - BaseExpressionMatchType, + BaseAssignTargetExpressionMatchType, DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], + OneOf[BaseAssignTargetExpressionMatchType], + AllOf[BaseAssignTargetExpressionMatchType], ] = DoNotCare() for_in: Union[ CompForMatchType, @@ -13756,7 +13644,7 @@ class StarredDictElement(BaseDictElement, BaseMatcherNode): @dataclass(frozen=True, eq=False, unsafe_hash=False) -class StarredElement(BaseElement, BaseExpression, BaseMatcherNode): +class StarredElement(BaseElement, BaseMatcherNode): value: Union[ BaseExpressionMatchType, DoNotCareSentinel, @@ -14287,375 +14175,6 @@ class SubtractAssign(BaseAugOp, BaseMatcherNode): ] = DoNotCare() -BaseTemplatedStringContentMatchType = Union[ - "BaseTemplatedStringContent", - MetadataMatchType, - MatchIfTrue[cst.BaseTemplatedStringContent], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TemplatedString(BaseExpression, BaseString, BaseMatcherNode): - parts: Union[ - Sequence[ - Union[ - BaseTemplatedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - AtLeastN[ - Union[ - BaseTemplatedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseTemplatedStringContentMatchType, - DoNotCareSentinel, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], - OneOf[ - Union[ - Sequence[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - AtLeastN[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - 
AllOf[BaseTemplatedStringContentMatchType], - AtLeastN[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - AtMostN[ - Union[ - BaseTemplatedStringContentMatchType, - OneOf[BaseTemplatedStringContentMatchType], - AllOf[BaseTemplatedStringContentMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]], - ] - ], - ] = DoNotCare() - start: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - end: Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - DoNotCareSentinel, - OneOf[ - Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - ] - ], - AllOf[ - Union[ - Literal['"', "'", '"""', "'''"], - MetadataMatchType, - MatchIfTrue[Literal['"', "'", '"""', "'''"]], - ] - ], - ] = DoNotCare() - lpar: Union[ - Sequence[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - DoNotCareSentinel, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.LeftParen]], - OneOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - AtLeastN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - AtMostN[ - Union[ - LeftParenMatchType, - OneOf[LeftParenMatchType], - AllOf[LeftParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.LeftParen]], - ] - ], - ] = DoNotCare() - rpar: Union[ - Sequence[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - DoNotCareSentinel, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.RightParen]], - OneOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - AtLeastN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - AtMostN[ - Union[ - RightParenMatchType, - OneOf[RightParenMatchType], - AllOf[RightParenMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.RightParen]], - ] - ], 
- ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TemplatedStringExpression(BaseTemplatedStringContent, BaseMatcherNode): - expression: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - conversion: Union[ - Optional[str], - MetadataMatchType, - MatchIfTrue[Optional[str]], - DoNotCareSentinel, - OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]], - ] = DoNotCare() - format_spec: Union[ - Optional[Sequence["BaseTemplatedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional[Sequence["BaseTemplatedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], - ] - ], - AllOf[ - Union[ - Optional[Sequence["BaseTemplatedStringContent"]], - MetadataMatchType, - MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]], - ] - ], - ] = DoNotCare() - whitespace_before_expression: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_expression: Union[ - BaseParenthesizableWhitespaceMatchType, - DoNotCareSentinel, - OneOf[BaseParenthesizableWhitespaceMatchType], - AllOf[BaseParenthesizableWhitespaceMatchType], - ] = DoNotCare() - equal: Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - ] - ], - AllOf[ - Union[ - Optional["AssignEqual"], - MetadataMatchType, - MatchIfTrue[Optional[cst.AssignEqual]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TemplatedStringText(BaseTemplatedStringContent, BaseMatcherNode): - value: Union[ - strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - @dataclass(frozen=True, eq=False, unsafe_hash=False) class TrailingWhitespace(BaseMatcherNode): whitespace: Union[ @@ -15388,307 +14907,6 @@ class Tuple( ] = DoNotCare() -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeAlias(BaseSmallStatement, BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - value: Union[ - BaseExpressionMatchType, - DoNotCareSentinel, - OneOf[BaseExpressionMatchType], - AllOf[BaseExpressionMatchType], - ] = DoNotCare() - type_parameters: Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - AllOf[ - Union[ - Optional["TypeParameters"], - MetadataMatchType, - MatchIfTrue[Optional[cst.TypeParameters]], - ] - ], - ] = DoNotCare() - 
whitespace_after_type: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_name: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_type_parameters: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - whitespace_after_equals: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - semicolon: Union[ - SemicolonMatchType, - DoNotCareSentinel, - OneOf[SemicolonMatchType], - AllOf[SemicolonMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TypeVarOrTypeVarTupleOrParamSpecMatchType = Union[ - "TypeVar", - "TypeVarTuple", - "ParamSpec", - MetadataMatchType, - MatchIfTrue[Union[cst.TypeVar, cst.TypeVarTuple, cst.ParamSpec]], -] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeParam(BaseMatcherNode): - param: Union[ - TypeVarOrTypeVarTupleOrParamSpecMatchType, - DoNotCareSentinel, - OneOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], - AllOf[TypeVarOrTypeVarTupleOrParamSpecMatchType], - ] = DoNotCare() - comma: Union[ - CommaMatchType, DoNotCareSentinel, OneOf[CommaMatchType], AllOf[CommaMatchType] - ] = DoNotCare() - equal: Union[ - AssignEqualMatchType, - DoNotCareSentinel, - OneOf[AssignEqualMatchType], - AllOf[AssignEqualMatchType], - ] = DoNotCare() - star: Union[ - Literal["", "*"], - MetadataMatchType, - MatchIfTrue[Literal["", "*"]], - DoNotCareSentinel, - OneOf[ - Union[Literal["", "*"], MetadataMatchType, MatchIfTrue[Literal["", "*"]]] - ], - AllOf[ - Union[Literal["", "*"], MetadataMatchType, MatchIfTrue[Literal["", "*"]]] - ], - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - default: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -TypeParamMatchType = Union["TypeParam", MetadataMatchType, MatchIfTrue[cst.TypeParam]] - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeParameters(BaseMatcherNode): - params: Union[ - Sequence[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - DoNotCareSentinel, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - DoNotCareSentinel, - MatchIfTrue[Sequence[cst.TypeParam]], - OneOf[ - Union[ - Sequence[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - 
TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.TypeParam]], - ] - ], - AllOf[ - Union[ - Sequence[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - AtLeastN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - AtMostN[ - Union[ - TypeParamMatchType, - OneOf[TypeParamMatchType], - AllOf[TypeParamMatchType], - ] - ], - ] - ], - MatchIfTrue[Sequence[cst.TypeParam]], - ] - ], - ] = DoNotCare() - lbracket: Union[ - LeftSquareBracketMatchType, - DoNotCareSentinel, - OneOf[LeftSquareBracketMatchType], - AllOf[LeftSquareBracketMatchType], - ] = DoNotCare() - rbracket: Union[ - RightSquareBracketMatchType, - DoNotCareSentinel, - OneOf[RightSquareBracketMatchType], - AllOf[RightSquareBracketMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeVar(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - bound: Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - DoNotCareSentinel, - OneOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - AllOf[ - Union[ - Optional["BaseExpression"], - MetadataMatchType, - MatchIfTrue[Optional[cst.BaseExpression]], - ] - ], - ] = DoNotCare() - colon: Union[ - ColonMatchType, DoNotCareSentinel, OneOf[ColonMatchType], AllOf[ColonMatchType] - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - -@dataclass(frozen=True, eq=False, unsafe_hash=False) -class TypeVarTuple(BaseMatcherNode): - name: Union[ - NameMatchType, DoNotCareSentinel, OneOf[NameMatchType], AllOf[NameMatchType] - ] = DoNotCare() - whitespace_after_star: Union[ - SimpleWhitespaceMatchType, - DoNotCareSentinel, - OneOf[SimpleWhitespaceMatchType], - AllOf[SimpleWhitespaceMatchType], - ] = DoNotCare() - metadata: Union[ - MetadataMatchType, - DoNotCareSentinel, - OneOf[MetadataMatchType], - AllOf[MetadataMatchType], - ] = DoNotCare() - - BaseUnaryOpMatchType = Union[ "BaseUnaryOp", MetadataMatchType, MatchIfTrue[cst.BaseUnaryOp] ] @@ -16495,7 +15713,6 @@ __all__ = [ "BaseStatement", "BaseString", "BaseSuite", - "BaseTemplatedStringContent", "BaseUnaryOp", "BinaryOperation", "BitAnd", @@ -16618,7 +15835,6 @@ __all__ = [ "Or", "Param", "ParamSlash", - "ParamSpec", "ParamStar", "Parameters", "ParenthesizedWhitespace", @@ -16648,19 +15864,11 @@ __all__ = [ "SubscriptElement", "Subtract", "SubtractAssign", - "TemplatedString", - "TemplatedStringExpression", - "TemplatedStringText", "TrailingWhitespace", "Try", "TryStar", "Tuple", - "TypeAlias", "TypeOf", - "TypeParam", - "TypeParameters", - "TypeVar", - "TypeVarTuple", "UnaryOperation", "While", "With", diff --git a/libcst/matchers/_decorators.py b/libcst/matchers/_decorators.py index ea69178f..c5cde6ea 100644 --- a/libcst/matchers/_decorators.py +++ b/libcst/matchers/_decorators.py @@ -30,10 +30,12 @@ def call_if_inside( """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: + if not hasattr(original, 
VISIT_POSITIVE_MATCHER_ATTR): + setattr(original, VISIT_POSITIVE_MATCHER_ATTR, []) setattr( original, VISIT_POSITIVE_MATCHER_ATTR, - [*getattr(original, VISIT_POSITIVE_MATCHER_ATTR, []), matcher], + [*getattr(original, VISIT_POSITIVE_MATCHER_ATTR), matcher], ) return original @@ -55,10 +57,12 @@ def call_if_not_inside( """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: + if not hasattr(original, VISIT_NEGATIVE_MATCHER_ATTR): + setattr(original, VISIT_NEGATIVE_MATCHER_ATTR, []) setattr( original, VISIT_NEGATIVE_MATCHER_ATTR, - [*getattr(original, VISIT_NEGATIVE_MATCHER_ATTR, []), matcher], + [*getattr(original, VISIT_NEGATIVE_MATCHER_ATTR), matcher], ) return original @@ -84,10 +88,12 @@ def visit(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: + if not hasattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR): + setattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR, []) setattr( original, CONSTRUCTED_VISIT_MATCHER_ATTR, - [*getattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR, []), matcher], + [*getattr(original, CONSTRUCTED_VISIT_MATCHER_ATTR), matcher], ) return original @@ -110,10 +116,12 @@ def leave(matcher: BaseMatcherNode) -> Callable[[_CSTVisitFuncT], _CSTVisitFuncT """ def inner(original: _CSTVisitFuncT) -> _CSTVisitFuncT: + if not hasattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR): + setattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR, []) setattr( original, CONSTRUCTED_LEAVE_MATCHER_ATTR, - [*getattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR, []), matcher], + [*getattr(original, CONSTRUCTED_LEAVE_MATCHER_ATTR), matcher], ) return original diff --git a/libcst/matchers/_matcher_base.py b/libcst/matchers/_matcher_base.py index 1727f0df..64670be4 100644 --- a/libcst/matchers/_matcher_base.py +++ b/libcst/matchers/_matcher_base.py @@ -4,6 +4,7 @@ # LICENSE file in the root directory of this source tree. import collections.abc +import copy import inspect import re from abc import ABCMeta @@ -29,8 +30,7 @@ from typing import ( import libcst import libcst.metadata as meta -from libcst import CSTLogicError, FlattenSentinel, MaybeSentinel, RemovalSentinel -from libcst._metadata_dependent import LazyValue +from libcst import FlattenSentinel, MaybeSentinel, RemovalSentinel class DoNotCareSentinel(Enum): @@ -69,6 +69,7 @@ class AbstractBaseMatcherNodeMeta(ABCMeta): matcher. """ + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, node: Type["BaseMatcherNode"]) -> "TypeOf[Type[BaseMatcherNode]]": return TypeOf(self, node) @@ -83,16 +84,25 @@ class BaseMatcherNode: several concrete matchers as options. """ + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self: _BaseMatcherNodeSelfT, other: _OtherNodeT ) -> "OneOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]]": - return OneOf(self, other) + # Without a cast, pyre thinks that the below OneOf is type OneOf[object] + # even though it has the types passed into it. + return cast( + OneOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]], OneOf(self, other) + ) def __and__( self: _BaseMatcherNodeSelfT, other: _OtherNodeT ) -> "AllOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]]": - return AllOf(self, other) + # Without a cast, pyre thinks that the below AllOf is type AllOf[object] + # even though it has the types passed into it. 
+ return cast( + AllOf[Union[_BaseMatcherNodeSelfT, _OtherNodeT]], AllOf(self, other) + ) def __invert__(self: _BaseMatcherNodeSelfT) -> "_BaseMatcherNodeSelfT": return cast(_BaseMatcherNodeSelfT, _InverseOf(self)) @@ -143,7 +153,7 @@ class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): for option in options: if isinstance(option, TypeOf): if option.initalized: - raise ValueError( + raise Exception( "Cannot chain an uninitalized TypeOf with an initalized one" ) actual_options.extend(option._raw_options) @@ -170,6 +180,7 @@ class TypeOf(Generic[_MatcherTypeT], BaseMatcherNode): self._call_items = (args, kwargs) return self + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeMatcherTypeT @@ -213,8 +224,8 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): actual_options: List[_MatcherT] = [] for option in options: if isinstance(option, AllOf): - raise ValueError("Cannot use AllOf and OneOf in combination!") - elif isinstance(option, (OneOf, TypeOf)): + raise Exception("Cannot use AllOf and OneOf in combination!") + elif isinstance(option, OneOf): actual_options.extend(option.options) else: actual_options.append(option) @@ -229,16 +240,19 @@ class OneOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": - return OneOf(self, other) + # Without a cast, pyre thinks that the below OneOf is type OneOf[object] + # even though it has the types passed into it. + return cast(OneOf[Union[_MatcherT, _OtherNodeT]], OneOf(self, other)) def __and__(self, other: _OtherNodeT) -> NoReturn: - raise ValueError("Cannot use AllOf and OneOf in combination!") + raise Exception("Cannot use AllOf and OneOf in combination!") def __invert__(self) -> "AllOf[_MatcherT]": # Invert using De Morgan's Law so we don't have to complicate types. - return AllOf(*[DoesNotMatch(m) for m in self._options]) + return cast(AllOf[_MatcherT], AllOf(*[DoesNotMatch(m) for m in self._options])) def __repr__(self) -> str: return f"OneOf({', '.join([repr(o) for o in self._options])})" @@ -286,9 +300,7 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): actual_options: List[_MatcherT] = [] for option in options: if isinstance(option, OneOf): - raise ValueError("Cannot use AllOf and OneOf in combination!") - elif isinstance(option, TypeOf): - raise ValueError("Cannot use AllOf and TypeOf in combination!") + raise Exception("Cannot use AllOf and OneOf in combination!") elif isinstance(option, AllOf): actual_options.extend(option.options) else: @@ -304,16 +316,19 @@ class AllOf(Generic[_MatcherT], BaseMatcherNode): """ return self._options + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> NoReturn: - raise ValueError("Cannot use AllOf and OneOf in combination!") + raise Exception("Cannot use AllOf and OneOf in combination!") def __and__(self, other: _OtherNodeT) -> "AllOf[Union[_MatcherT, _OtherNodeT]]": - return AllOf(self, other) + # Without a cast, pyre thinks that the below AllOf is type AllOf[object] + # even though it has the types passed into it. 
+ return cast(AllOf[Union[_MatcherT, _OtherNodeT]], AllOf(self, other)) def __invert__(self) -> "OneOf[_MatcherT]": # Invert using De Morgan's Law so we don't have to complicate types. - return OneOf(*[DoesNotMatch(m) for m in self._options]) + return cast(OneOf[_MatcherT], OneOf(*[DoesNotMatch(m) for m in self._options])) def __repr__(self) -> str: return f"AllOf({', '.join([repr(o) for o in self._options])})" @@ -350,6 +365,7 @@ class _InverseOf(Generic[_MatcherT]): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] @@ -420,6 +436,7 @@ class _ExtractMatchingNode(Generic[_MatcherT]): """ return self._name + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[_MatcherT, _OtherNodeT]]": # Without a cast, pyre thinks that the below OneOf is type OneOf[object] @@ -431,7 +448,7 @@ class _ExtractMatchingNode(Generic[_MatcherT]): # that are captured with an and, either all of them will be assigned the # same node, or none of them. It makes more sense to move the SaveMatchedNode # up to wrap the AllOf. - raise ValueError( + raise Exception( ( "Cannot use AllOf with SavedMatchedNode children! Instead, you should " + "use SaveMatchedNode(AllOf(options...))." @@ -447,10 +464,10 @@ class _ExtractMatchingNode(Generic[_MatcherT]): def __invert__(self) -> "_MatcherT": # This doesn't make sense. We don't want to capture a node only if it # doesn't match, since this will never capture anything. - raise ValueError( + raise Exception( ( "Cannot invert a SaveMatchedNode. Instead you should wrap SaveMatchedNode " - "around your inversion itself" + + "around your inversion itself" ) ) @@ -493,16 +510,25 @@ class MatchIfTrue(Generic[_MatchIfTrueT]): """ return self._func + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": - return OneOf(self, other) + # Without a cast, pyre thinks that the below OneOf is type OneOf[object] + # even though it has the types passed into it. + return cast( + OneOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], OneOf(self, other) + ) def __and__( self, other: _OtherNodeT ) -> "AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]]": - return AllOf(self, other) + # Without a cast, pyre thinks that the below AllOf is type AllOf[object] + # even though it has the types passed into it. + return cast( + AllOf[Union[MatchIfTrue[_MatchIfTrueT], _OtherNodeT]], AllOf(self, other) + ) def __invert__(self) -> "MatchIfTrue[_MatchIfTrueT]": # Construct a wrapped version of MatchIfTrue for typing simplicity. @@ -532,6 +558,7 @@ def MatchRegex(regex: Union[str, Pattern[str]]) -> MatchIfTrue[str]: def _match_func(value: object) -> bool: if isinstance(value, str): + # pyre-ignore Pyre doesn't think a 'Pattern' can be passed to fullmatch. 
return bool(re.fullmatch(regex, value)) else: return False @@ -551,11 +578,8 @@ class MatchMetadata(_BaseMetadataMatcher): """ Matcher that looks up the metadata on the current node using the provided metadata provider and compares the value on the node against the value provided - to :class:`MatchMetadata`. - If the metadata provider is unresolved, a :class:`LookupError` exeption will be - raised and ask you to provide a :class:`~libcst.metadata.MetadataWrapper`. - If the metadata value does not exist for a particular node, :class:`MatchMetadata` - will be considered not a match. + to :class:`MatchMetadata`. If the metadata value does not exist for a particular + node, :class:`MatchMetadata` will always be considered not a match. For example, to match against any function call which has one parameter which is used in a load expression context:: @@ -613,12 +637,15 @@ class MatchMetadata(_BaseMetadataMatcher): """ return self._value + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: _OtherNodeT) -> "OneOf[Union[MatchMetadata, _OtherNodeT]]": - return OneOf(self, other) + # Without the cast, pyre doesn't know this is valid + return cast(OneOf[Union[MatchMetadata, _OtherNodeT]], OneOf(self, other)) def __and__(self, other: _OtherNodeT) -> "AllOf[Union[MatchMetadata, _OtherNodeT]]": - return AllOf(self, other) + # Without the cast, pyre doesn't know this is valid + return cast(AllOf[Union[MatchMetadata, _OtherNodeT]], AllOf(self, other)) def __invert__(self) -> "MatchMetadata": # We intentionally lie here, for the same reason given in the documentation @@ -634,10 +661,8 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): Matcher that looks up the metadata on the current node using the provided metadata provider and passes it to a callable which can inspect the metadata further, returning ``True`` if the matcher should be considered a match. - If the metadata provider is unresolved, a :class:`LookupError` exeption will be - raised and ask you to provide a :class:`~libcst.metadata.MetadataWrapper`. If the metadata value does not exist for a particular node, - :class:`MatchMetadataIfTrue` will be considered not a match. + :class:`MatchMetadataIfTrue` will always be considered not a match. For example, to match against any arg whose qualified name might be ``typing.Dict``:: @@ -696,16 +721,19 @@ class MatchMetadataIfTrue(_BaseMetadataMatcher): """ return self._func + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__( self, other: _OtherNodeT ) -> "OneOf[Union[MatchMetadataIfTrue, _OtherNodeT]]": - return OneOf(self, other) + # Without the cast, pyre doesn't know this is valid + return cast(OneOf[Union[MatchMetadataIfTrue, _OtherNodeT]], OneOf(self, other)) def __and__( self, other: _OtherNodeT ) -> "AllOf[Union[MatchMetadataIfTrue, _OtherNodeT]]": - return AllOf(self, other) + # Without the cast, pyre doesn't know this is valid + return cast(AllOf[Union[MatchMetadataIfTrue, _OtherNodeT]], AllOf(self, other)) def __invert__(self) -> "MatchMetadataIfTrue": # Construct a wrapped version of MatchMetadataIfTrue for typing simplicity. 
@@ -761,9 +789,7 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): n: int, ) -> None: if n < 0: - raise ValueError( - f"{self.__class__.__qualname__} n attribute must be positive" - ) + raise Exception(f"{self.__class__.__name__} n attribute must be positive") self._n: int = n self._matcher: Union[_MatcherT, DoNotCareSentinel] = matcher @@ -784,15 +810,16 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: - raise ValueError("AtLeastN cannot be used in a OneOf matcher") + raise Exception("AtLeastN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise ValueError("AtLeastN cannot be used in an AllOf matcher") + raise Exception("AtLeastN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: - raise ValueError("Cannot invert an AtLeastN matcher!") + raise Exception("Cannot invert an AtLeastN matcher!") def __repr__(self) -> str: if self._n == 0: @@ -802,7 +829,7 @@ class AtLeastN(Generic[_MatcherT], _BaseWildcardNode): def ZeroOrMore( - matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT, + matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT ) -> AtLeastN[Union[_MatcherT, DoNotCareSentinel]]: """ Used as a convenience wrapper to :class:`AtLeastN` when ``n`` is equal to ``0``. @@ -865,9 +892,7 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): n: int, ) -> None: if n < 0: - raise ValueError( - f"{self.__class__.__qualname__} n attribute must be positive" - ) + raise Exception(f"{self.__class__.__name__} n attribute must be positive") self._n: int = n self._matcher: Union[_MatcherT, DoNotCareSentinel] = matcher @@ -889,15 +914,16 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): """ return self._matcher + # pyre-fixme[14]: `__or__` overrides method defined in `type` inconsistently. # pyre-fixme[15]: `__or__` overrides method defined in `type` inconsistently. def __or__(self, other: object) -> NoReturn: - raise ValueError("AtMostN cannot be used in a OneOf matcher") + raise Exception("AtMostN cannot be used in a OneOf matcher") def __and__(self, other: object) -> NoReturn: - raise ValueError("AtMostN cannot be used in an AllOf matcher") + raise Exception("AtMostN cannot be used in an AllOf matcher") def __invert__(self) -> NoReturn: - raise ValueError("Cannot invert an AtMostN matcher!") + raise Exception("Cannot invert an AtMostN matcher!") def __repr__(self) -> str: if self._n == 1: @@ -907,7 +933,7 @@ class AtMostN(Generic[_MatcherT], _BaseWildcardNode): def ZeroOrOne( - matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT, + matcher: Union[_MatcherT, DoNotCareSentinel] = DoNotCareSentinel.DEFAULT ) -> AtMostN[Union[_MatcherT, DoNotCareSentinel]]: """ Used as a convenience wrapper to :class:`AtMostN` when ``n`` is equal to ``1``. @@ -1021,7 +1047,7 @@ def _matches_zero_nodes( MatchIfTrue[libcst.CSTNode], _BaseMetadataMatcher, DoNotCareSentinel, - ], + ] ) -> bool: if isinstance(matcher, AtLeastN) and matcher.n == 0: return True @@ -1162,7 +1188,7 @@ def _sequence_matches( # noqa: C901 else: # There are no other types of wildcard consumers, but we're making # pyre happy with that fact. 
- raise CSTLogicError(f"Logic error unrecognized wildcard {type(matcher)}!") + raise Exception(f"Logic error unrecognized wildcard {type(matcher)}!") elif isinstance(matcher, _ExtractMatchingNode): # See if the raw matcher matches. If it does, capture the sequence we matched and store it. result = _sequence_matches( @@ -1358,7 +1384,7 @@ def _metadata_matches( # noqa: C901 return None return {} if actual_value == metadata.value else None else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") def _node_matches( # noqa: C901 @@ -1491,13 +1517,11 @@ def _matches( return _node_matches(node, matcher, metadata_lookup) -def _construct_metadata_fetcher_null() -> ( - Callable[[meta.ProviderT, libcst.CSTNode], object] -): - def _fetch(provider: meta.ProviderT, node: libcst.CSTNode) -> NoReturn: - raise LookupError( - f"{provider.__name__} is not resolved; did you forget a MetadataWrapper?" - ) +def _construct_metadata_fetcher_null() -> Callable[ + [meta.ProviderT, libcst.CSTNode], object +]: + def _fetch(*args: object, **kwargs: object) -> object: + return _METADATA_MISSING_SENTINEL return _fetch @@ -1520,11 +1544,7 @@ def _construct_metadata_fetcher_wrapper( if provider not in metadata: metadata[provider] = wrapper.resolve(provider) - node_metadata = metadata[provider].get(node, _METADATA_MISSING_SENTINEL) - if isinstance(node_metadata, LazyValue): - node_metadata = node_metadata() - - return node_metadata + return metadata.get(provider, {}).get(node, _METADATA_MISSING_SENTINEL) return _fetch @@ -1797,7 +1817,7 @@ class _ReplaceTransformer(libcst.CSTTransformer): if inspect.isfunction(replacement): self.replacement = replacement elif isinstance(replacement, (MaybeSentinel, RemovalSentinel)): - self.replacement = lambda node, matches: replacement + self.replacement = lambda node, matches: copy.deepcopy(replacement) else: # pyre-ignore We know this is a CSTNode. self.replacement = lambda node, matches: replacement.deep_clone() @@ -1912,7 +1932,7 @@ def replace( """ if isinstance(tree, (RemovalSentinel, MaybeSentinel)): # We can't do any replacements on this, so return the tree exactly. - return tree + return copy.deepcopy(tree) if isinstance(matcher, (AtLeastN, AtMostN)): # We can't match this, since these matchers are forbidden at top level. # These are not subclasses of BaseMatcherNode, but in the case that the @@ -1922,7 +1942,7 @@ def replace( elif isinstance(tree, meta.MetadataWrapper): return tree.module.deep_clone() else: - raise CSTLogicError("Logic error!") + raise Exception("Logic error!") if isinstance(tree, meta.MetadataWrapper) and metadata_resolver is None: # Provide a convenience for calling replace directly on a MetadataWrapper. 
@@ -1939,5 +1959,5 @@ def replace( new_tree = tree.visit(replacer) if isinstance(new_tree, FlattenSentinel): # The above transform never returns FlattenSentinel, so this isn't possible - raise CSTLogicError("Logic error, cannot get a FlattenSentinel here!") + raise Exception("Logic error, cannot get a FlattenSentinel here!") return new_tree diff --git a/libcst/matchers/_return_types.py b/libcst/matchers/_return_types.py index 2f050088..bc8e9042 100644 --- a/libcst/matchers/_return_types.py +++ b/libcst/matchers/_return_types.py @@ -20,7 +20,6 @@ from libcst._nodes.expression import ( BaseExpression, BaseFormattedStringContent, BaseSlice, - BaseTemplatedStringContent, BinaryOperation, BooleanOperation, Call, @@ -67,9 +66,6 @@ from libcst._nodes.expression import ( StarredElement, Subscript, SubscriptElement, - TemplatedString, - TemplatedStringExpression, - TemplatedStringText, Tuple, UnaryOperation, Yield, @@ -178,7 +174,6 @@ from libcst._nodes.statement import ( MatchValue, NameItem, Nonlocal, - ParamSpec, Pass, Raise, Return, @@ -186,11 +181,6 @@ from libcst._nodes.statement import ( SimpleStatementSuite, Try, TryStar, - TypeAlias, - TypeParam, - TypeParameters, - TypeVar, - TypeVarTuple, While, With, WithItem, @@ -333,7 +323,6 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { Or: BaseBooleanOp, Param: Union[Param, MaybeSentinel, RemovalSentinel], ParamSlash: Union[ParamSlash, MaybeSentinel], - ParamSpec: ParamSpec, ParamStar: Union[ParamStar, MaybeSentinel], Parameters: Parameters, ParenthesizedWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], @@ -357,23 +346,15 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = { SimpleWhitespace: Union[BaseParenthesizableWhitespace, MaybeSentinel], Slice: BaseSlice, StarredDictElement: Union[BaseDictElement, RemovalSentinel], - StarredElement: BaseExpression, + StarredElement: Union[BaseElement, RemovalSentinel], Subscript: BaseExpression, SubscriptElement: Union[SubscriptElement, RemovalSentinel], Subtract: BaseBinaryOp, SubtractAssign: BaseAugOp, - TemplatedString: BaseExpression, - TemplatedStringExpression: Union[BaseTemplatedStringContent, RemovalSentinel], - TemplatedStringText: Union[BaseTemplatedStringContent, RemovalSentinel], TrailingWhitespace: TrailingWhitespace, Try: Union[BaseStatement, RemovalSentinel], TryStar: Union[BaseStatement, RemovalSentinel], Tuple: BaseExpression, - TypeAlias: Union[BaseSmallStatement, RemovalSentinel], - TypeParam: Union[TypeParam, RemovalSentinel], - TypeParameters: TypeParameters, - TypeVar: TypeVar, - TypeVarTuple: TypeVarTuple, UnaryOperation: BaseExpression, While: Union[BaseStatement, RemovalSentinel], With: Union[BaseStatement, RemovalSentinel], diff --git a/libcst/matchers/_visitors.py b/libcst/matchers/_visitors.py index b9252173..ded6eb9d 100644 --- a/libcst/matchers/_visitors.py +++ b/libcst/matchers/_visitors.py @@ -45,26 +45,12 @@ from libcst.matchers._matcher_base import ( ) from libcst.matchers._return_types import TYPED_FUNCTION_RETURN_MAPPING -try: - # PEP 604 unions, in Python 3.10+ - from types import UnionType -except ImportError: - # We use this for isinstance; no annotation will be an instance of this - class UnionType: - pass - - CONCRETE_METHODS: Set[str] = { *{f"visit_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, *{f"leave_{cls.__name__}" for cls in TYPED_FUNCTION_RETURN_MAPPING}, } -def is_property(obj: object, attr_name: str) -> bool: - """Check if obj.attr is a property without evaluating it.""" - return 
isinstance(getattr(type(obj), attr_name, None), property) - - # pyre-ignore We don't care about Any here, its not exposed. def _match_decorator_unpickler(kwargs: Any) -> "MatchDecoratorMismatch": return MatchDecoratorMismatch(**kwargs) @@ -92,15 +78,18 @@ def _get_possible_match_classes(matcher: BaseMatcherNode) -> List[Type[cst.CSTNo return [getattr(cst, matcher.__class__.__name__)] -def _annotation_is_union(annotation: object) -> bool: +def _annotation_looks_like_union(annotation: object) -> bool: + if getattr(annotation, "__origin__", None) is Union: + return True + # support PEP-604 style unions introduced in Python 3.10 return ( - isinstance(annotation, UnionType) - or getattr(annotation, "__origin__", None) is Union + annotation.__class__.__name__ == "Union" + and annotation.__class__.__module__ == "types" ) def _get_possible_annotated_classes(annotation: object) -> List[Type[object]]: - if _annotation_is_union(annotation): + if _annotation_looks_like_union(annotation): return getattr(annotation, "__args__", []) else: return [cast(Type[object], annotation)] @@ -137,6 +126,7 @@ def _verify_return_annotation( # it is "None". if type_hints.get("return", type(None)) is not type(None): # noqa: E721 raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should only decorate functions that do " + "not return.", @@ -185,6 +175,7 @@ def _verify_parameter_annotations( meth_signature = signature(meth) if len(meth_signature.parameters) != expected_param_count: raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute `__qualname__`. meth.__qualname__, f"@{decorator_name} should decorate functions which take " + f"{expected_param_count} parameter" @@ -241,6 +232,8 @@ def _check_types( # First thing first, make sure this isn't wrapping an inner class. if not ismethod(meth): raise MatchDecoratorMismatch( + # pyre-fixme[16]: Anonymous callable has no attribute + # `__qualname__`. meth.__qualname__, "Matcher decorators should only be used on methods of " + "MatcherDecoratableTransformer or " @@ -270,22 +263,20 @@ def _check_types( ) -def _gather_matchers(obj: object) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: - """ - Set of gating matchers that we need to track and evaluate. We use these - in conjunction with the call_if_inside and call_if_not_inside decorators - to determine whether to call a visit/leave function. - """ +def _gather_matchers(obj: object) -> Set[BaseMatcherNode]: + visit_matchers: Set[BaseMatcherNode] = set() - visit_matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = {} - - for attr_name in dir(obj): - if not is_property(obj, attr_name): - func = getattr(obj, attr_name) - for matcher in getattr(func, VISIT_POSITIVE_MATCHER_ATTR, []): - visit_matchers[cast(BaseMatcherNode, matcher)] = None - for matcher in getattr(func, VISIT_NEGATIVE_MATCHER_ATTR, []): - visit_matchers[cast(BaseMatcherNode, matcher)] = None + for func in dir(obj): + try: + for matcher in getattr(getattr(obj, func), VISIT_POSITIVE_MATCHER_ATTR, []): + visit_matchers.add(cast(BaseMatcherNode, matcher)) + for matcher in getattr(getattr(obj, func), VISIT_NEGATIVE_MATCHER_ATTR, []): + visit_matchers.add(cast(BaseMatcherNode, matcher)) + except Exception: + # This could be a calculated property, and calling getattr() evaluates it. + # We have no control over the implementation detail, so if it raises, we + # should not crash.
+ pass return visit_matchers @@ -295,6 +286,7 @@ def _assert_not_concrete( ) -> None: if func.__name__ in CONCRETE_METHODS: raise MatchDecoratorMismatch( + # pyre-ignore This anonymous method has a qualname. func.__qualname__, f"@{decorator_name} should not decorate functions that are concrete " + "visit or leave methods.", @@ -309,18 +301,26 @@ def _gather_constructed_visit_funcs( ] = {} for funcname in dir(obj): - if is_property(obj, funcname): + try: + possible_func = getattr(obj, funcname) + if not ismethod(possible_func): + continue + func = cast(Callable[[cst.CSTNode], None], possible_func) + except Exception: + # This could be a calculated property, and calling getattr() evaluates it. + # We have no control over the implementation detail, so if it raises, we + # should not crash. continue - possible_func = getattr(obj, funcname) - if not ismethod(possible_func): - continue - func = cast(Callable[[cst.CSTNode], None], possible_func) matchers = getattr(func, CONSTRUCTED_VISIT_MATCHER_ATTR, []) if matchers: # Make sure that we aren't accidentally putting a @visit on a visit_Node. _assert_not_concrete("visit", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) + # pyre-fixme[6]: Expected + # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd + # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> + # None]`. constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, @@ -340,18 +340,26 @@ def _gather_constructed_leave_funcs( ] = {} for funcname in dir(obj): - if is_property(obj, funcname): + try: + possible_func = getattr(obj, funcname) + if not ismethod(possible_func): + continue + func = cast(Callable[[cst.CSTNode], None], possible_func) + except Exception: + # This could be a calculated property, and calling getattr() evaluates it. + # We have no control over the implementation detail, so if it raises, we + # should not crash. continue - possible_func = getattr(obj, funcname) - if not ismethod(possible_func): - continue - func = cast(Callable[[cst.CSTNode], None], possible_func) matchers = getattr(func, CONSTRUCTED_LEAVE_MATCHER_ATTR, []) if matchers: # Make sure that we aren't accidentally putting a @leave on a leave_Node. _assert_not_concrete("leave", func) for matcher in matchers: casted_matcher = cast(BaseMatcherNode, matcher) + # pyre-fixme[6]: Expected + # `Sequence[typing.Callable[[cst._nodes.base.CSTNode], None]]` for 2nd + # param but got `Tuple[*Tuple[(CSTNode) -> None, ...], (CSTNode) -> + # None]`. constructed_visitors[casted_matcher] = ( *constructed_visitors.get(casted_matcher, ()), func, @@ -447,7 +455,12 @@ class MatcherDecoratableTransformer(CSTTransformer): def __init__(self) -> None: CSTTransformer.__init__(self) - self.__matchers: Optional[Dict[BaseMatcherNode, Optional[cst.CSTNode]]] = None + # List of gating matchers that we need to track and evaluate. We use these + # in conjunction with the call_if_inside and call_if_not_inside decorators + # to determine whether or not to call a visit/leave function. + self._matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = { + m: None for m in _gather_matchers(self) + } # Mapping of matchers to functions. If in the course of visiting the tree, # a node matches one of these matchers, the corresponding function will be # called as if it was a visit_* method.
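For orientation, the gating dictionary built eagerly in `__init__` above is what `call_if_inside` / `call_if_not_inside` consult during traversal. A minimal sketch of that usage follows; the transformer class, function names, and sample module are invented for illustration and are not part of this diff.

import libcst as cst
import libcst.matchers as m


class RenameInsideFoo(m.MatcherDecoratableTransformer):
    # The FunctionDef matcher below becomes one of the gating matchers tracked in
    # self._matchers; leave_Name only runs while the traversal is inside a
    # function literally named "foo".
    @m.call_if_inside(m.FunctionDef(name=m.Name("foo")))
    def leave_Name(
        self, original_node: cst.Name, updated_node: cst.Name
    ) -> cst.BaseExpression:
        if updated_node.value == "x":
            return updated_node.with_changes(value="y")
        return updated_node


module = cst.parse_module("def foo():\n    x = 1\n\ndef bar():\n    x = 2\n")
print(module.visit(RenameInsideFoo()).code)  # only the x inside foo() becomes y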
@@ -480,16 +493,6 @@ class MatcherDecoratableTransformer(CSTTransformer): expected_none_return=False, ) - @property - def _matchers(self) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: - if self.__matchers is None: - self.__matchers = _gather_matchers(self) - return self.__matchers - - @_matchers.setter - def _matchers(self, value: Dict[BaseMatcherNode, Optional[cst.CSTNode]]) -> None: - self.__matchers = value - def on_visit(self, node: cst.CSTNode) -> bool: # First, evaluate any matchers that we have which we are not inside already. self._matchers = _visit_matchers(self._matchers, node, self) @@ -664,7 +667,12 @@ class MatcherDecoratableVisitor(CSTVisitor): def __init__(self) -> None: CSTVisitor.__init__(self) - self.__matchers: Optional[Dict[BaseMatcherNode, Optional[cst.CSTNode]]] = None + # List of gating matchers that we need to track and evaluate. We use these + # in conjunction with the call_if_inside and call_if_not_inside decorators + # to determine whether or not to call a visit/leave function. + self._matchers: Dict[BaseMatcherNode, Optional[cst.CSTNode]] = { + m: None for m in _gather_matchers(self) + } # Mapping of matchers to functions. If in the course of visiting the tree, # a node matches one of these matchers, the corresponding function will be # called as if it was a visit_* method. @@ -692,16 +700,6 @@ class MatcherDecoratableVisitor(CSTVisitor): expected_none_return=True, ) - @property - def _matchers(self) -> Dict[BaseMatcherNode, Optional[cst.CSTNode]]: - if self.__matchers is None: - self.__matchers = _gather_matchers(self) - return self.__matchers - - @_matchers.setter - def _matchers(self, value: Dict[BaseMatcherNode, Optional[cst.CSTNode]]) -> None: - self.__matchers = value - def on_visit(self, node: cst.CSTNode) -> bool: # First, evaluate any matchers that we have which we are not inside already. self._matchers = _visit_matchers(self._matchers, node, self) diff --git a/libcst/matchers/tests/test_decorators.py b/libcst/matchers/tests/test_decorators.py index 8b28657c..7486cee8 100644 --- a/libcst/matchers/tests/test_decorators.py +++ b/libcst/matchers/tests/test_decorators.py @@ -3,11 +3,10 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree.
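Before the test changes below, here is a quick illustration of the "Mapping of matchers to functions" path set up in `MatcherDecoratableVisitor.__init__` above, i.e. how a constructed `@m.visit` function is invoked during traversal. The class and method names are made up for this sketch.

import libcst as cst
import libcst.matchers as m


class CountTrueNames(m.MatcherDecoratableVisitor):
    def __init__(self) -> None:
        super().__init__()
        self.count = 0

    # Not a concrete visit_Name method; this lands in the constructed-visitor
    # mapping and is called whenever a node matches m.Name("True").
    @m.visit(m.Name("True"))
    def _count_true(self, node: cst.Name) -> None:
        self.count += 1


visitor = CountTrueNames()
cst.parse_module("x = True\ny = True\nz = False\n").visit(visitor)
print(visitor.count)  # 2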
-import sys from ast import literal_eval from textwrap import dedent from typing import List, Set -from unittest import skipIf +from unittest.mock import Mock import libcst as cst import libcst.matchers as m @@ -997,14 +996,22 @@ class MatchersVisitLeaveDecoratorsTest(UnitTest): self.assertEqual(visitor.visits, ['"baz"']) +# This is meant to simulate `cst.ImportFrom | cst.RemovalSentinel` in py3.10 +FakeUnionClass: Mock = Mock() +setattr(FakeUnionClass, "__name__", "Union") +setattr(FakeUnionClass, "__module__", "types") +FakeUnion: Mock = Mock() +FakeUnion.__class__ = FakeUnionClass +FakeUnion.__args__ = [cst.ImportFrom, cst.RemovalSentinel] + + class MatchersUnionDecoratorsTest(UnitTest): - @skipIf(bool(sys.version_info < (3, 10)), "new union syntax not available") def test_init_with_new_union_annotation(self) -> None: class TransformerWithUnionReturnAnnotation(m.MatcherDecoratableTransformer): @m.leave(m.ImportFrom(module=m.Name(value="typing"))) def test( self, original_node: cst.ImportFrom, updated_node: cst.ImportFrom - ) -> cst.ImportFrom | cst.RemovalSentinel: + ) -> FakeUnion: pass # assert that init (specifically _check_types on return annotation) passes diff --git a/libcst/matchers/tests/test_findall.py b/libcst/matchers/tests/test_findall.py index 6e81e481..77316a1a 100644 --- a/libcst/matchers/tests/test_findall.py +++ b/libcst/matchers/tests/test_findall.py @@ -103,17 +103,14 @@ class MatchersFindAllTest(UnitTest): ], ) - # Test that failing to provide metadata leads to raising an informative exception - with self.assertRaises( - LookupError, - msg="ExpressionContextProvider is not resolved; did you forget a MetadataWrapper?", - ): - booleans = findall( - wrapper.module, - m.MatchMetadata( - meta.ExpressionContextProvider, meta.ExpressionContext.STORE - ), - ) + # Test that failing to provide metadata leads to no match + booleans = findall( + wrapper.module, + m.MatchMetadata( + meta.ExpressionContextProvider, meta.ExpressionContext.STORE + ), + ) + self.assertNodeSequenceEqual(booleans, []) def test_findall_with_visitors(self) -> None: # Find all assignments in a tree diff --git a/libcst/matchers/tests/test_matchers.py b/libcst/matchers/tests/test_matchers.py index e41bd866..9d79f640 100644 --- a/libcst/matchers/tests/test_matchers.py +++ b/libcst/matchers/tests/test_matchers.py @@ -291,13 +291,6 @@ class MatchersMatcherTest(UnitTest): self.assertTrue( matches(cst.Name("True"), m.OneOf(m.Name("True"), m.Name("False"))) ) - # Match when one of the option is a TypeOf - self.assertTrue( - matches( - cst.Name("True"), - m.OneOf(m.TypeOf(m.Name, m.NameItem)("True"), m.Name("False")), - ) - ) # Match any assignment that assigns a value of True or False to an # unspecified target. self.assertTrue( diff --git a/libcst/matchers/tests/test_matchers_with_metadata.py b/libcst/matchers/tests/test_matchers_with_metadata.py index 63530c37..e4bdf07e 100644 --- a/libcst/matchers/tests/test_matchers_with_metadata.py +++ b/libcst/matchers/tests/test_matchers_with_metadata.py @@ -366,18 +366,14 @@ class MatchersMetadataTest(UnitTest): ) ) - def test_lambda_metadata_matcher_with_unresolved_metadata(self) -> None: + def test_lambda_metadata_matcher_with_no_metadata(self) -> None: # Match on qualified name provider module = cst.parse_module( "from typing import List\n\ndef foo() -> None: pass\n" ) functiondef = cst.ensure_type(module.body[1], cst.FunctionDef) - # Test that when the metadata is unresolved, raise an informative exception. 
- with self.assertRaises( - LookupError, - msg="QualifiedNameProvider is not resolved; did you forget a MetadataWrapper?", - ): + self.assertFalse( matches( functiondef, m.FunctionDef( @@ -389,24 +385,6 @@ class MatchersMetadataTest(UnitTest): ) ), ) - - def test_lambda_metadata_matcher_with_no_metadata(self) -> None: - class VoidProvider(meta.BatchableMetadataProvider[object]): - """A dummy metadata provider""" - - module = cst.parse_module( - "from typing import List\n\ndef foo() -> None: pass\n" - ) - wrapper = cst.MetadataWrapper(module) - functiondef = cst.ensure_type(wrapper.module.body[1], cst.FunctionDef) - - # Test that when the node has no corresponding metadata, there is no match. - self.assertFalse( - matches( - functiondef, - m.FunctionDef(name=m.MatchMetadataIfTrue(VoidProvider, lambda _: True)), - metadata_resolver=wrapper, - ) ) def test_lambda_metadata_matcher_operators(self) -> None: diff --git a/libcst/metadata/__init__.py b/libcst/metadata/__init__.py index ecc42741..75e38229 100644 --- a/libcst/metadata/__init__.py +++ b/libcst/metadata/__init__.py @@ -5,7 +5,6 @@ from libcst._position import CodePosition, CodeRange -from libcst.metadata.accessor_provider import AccessorProvider from libcst.metadata.base_provider import ( BaseMetadataProvider, BatchableMetadataProvider, @@ -16,7 +15,6 @@ from libcst.metadata.expression_context_provider import ( ExpressionContext, ExpressionContextProvider, ) -from libcst.metadata.file_path_provider import FilePathProvider from libcst.metadata.full_repo_manager import FullRepoManager from libcst.metadata.name_provider import ( FullyQualifiedNameProvider, @@ -88,8 +86,6 @@ __all__ = [ "Accesses", "TypeInferenceProvider", "FullRepoManager", - "AccessorProvider", - "FilePathProvider", # Experimental APIs: "ExperimentalReentrantCodegenProvider", "CodegenPartial", diff --git a/libcst/metadata/accessor_provider.py b/libcst/metadata/accessor_provider.py deleted file mode 100644 index 5d4f22e4..00000000 --- a/libcst/metadata/accessor_provider.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - - -import dataclasses - -import libcst as cst - -from libcst.metadata.base_provider import VisitorMetadataProvider - - -class AccessorProvider(VisitorMetadataProvider[str]): - def on_visit(self, node: cst.CSTNode) -> bool: - for f in dataclasses.fields(node): - child = getattr(node, f.name) - self.set_metadata(child, f.name) - return True diff --git a/libcst/metadata/base_provider.py b/libcst/metadata/base_provider.py index 5d93fbe8..69af2dce 100644 --- a/libcst/metadata/base_provider.py +++ b/libcst/metadata/base_provider.py @@ -6,23 +6,22 @@ from pathlib import Path from types import MappingProxyType from typing import ( + Callable, + cast, Generic, List, Mapping, MutableMapping, Optional, - Protocol, Type, TYPE_CHECKING, TypeVar, - Union, ) from libcst._batched_visitor import BatchableCSTVisitor from libcst._metadata_dependent import ( _T as _MetadataT, _UNDEFINED_DEFAULT, - LazyValue, MetadataDependent, ) from libcst._visitors import CSTVisitor @@ -37,18 +36,6 @@ ProviderT = Type["BaseMetadataProvider[object]"] # BaseMetadataProvider[int] would be a subtype of BaseMetadataProvider[object], so the # typevar is covariant. 
_ProvidedMetadataT = TypeVar("_ProvidedMetadataT", covariant=True) -MaybeLazyMetadataT = Union[LazyValue[_ProvidedMetadataT], _ProvidedMetadataT] - - -class GenCacheMethod(Protocol): - def __call__( - self, - root_path: Path, - paths: List[str], - *, - timeout: Optional[int] = None, - use_pyproject_toml: bool = False, - ) -> Mapping[str, object]: ... # We can't use an ABCMeta here, because of metaclass conflicts @@ -65,26 +52,26 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): # # N.B. This has some typing variance problems. See `set_metadata` for an # explanation. - _computed: MutableMapping["CSTNode", MaybeLazyMetadataT] + _computed: MutableMapping["CSTNode", _ProvidedMetadataT] - #: Implement gen_cache to indicate the metadata provider depends on cache from external + #: Implement gen_cache to indicate the metadata provider depends on cache from external #: system. This function will be called by :class:`~libcst.metadata.FullRepoManager` #: to compute required cache object per file path. - gen_cache: Optional[GenCacheMethod] = None + gen_cache: Optional[Callable[[Path, List[str], int], Mapping[str, object]]] = None def __init__(self, cache: object = None) -> None: super().__init__() - self._computed: MutableMapping["CSTNode", MaybeLazyMetadataT] = {} + self._computed = {} if self.gen_cache and cache is None: # The metadata provider implementation is responsible to store and use cache. - raise ValueError( + raise Exception( f"Cache is required for initializing {self.__class__.__name__}." ) self.cache = cache def _gen( self, wrapper: "MetadataWrapper" - ) -> Mapping["CSTNode", MaybeLazyMetadataT]: + ) -> Mapping["CSTNode", _ProvidedMetadataT]: """ Resolves and returns metadata mapping for the module in ``wrapper``. @@ -106,7 +93,11 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): """ ... - def set_metadata(self, node: "CSTNode", value: MaybeLazyMetadataT) -> None: + # pyre-ignore[46]: The covariant `value` isn't type-safe because we write it to + # pyre: `self._computed`, however we assume that only one subclass in the MRO chain + # pyre: will ever call `set_metadata`, so it's okay for our purposes. There's no + # pyre: sane way to redesign this API so that it doesn't have this problem. + def set_metadata(self, node: "CSTNode", value: _ProvidedMetadataT) -> None: """ Record a metadata value ``value`` for ``node``.
""" @@ -116,9 +107,7 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): self, key: Type["BaseMetadataProvider[_MetadataT]"], node: "CSTNode", - default: Union[ - MaybeLazyMetadataT, Type[_UNDEFINED_DEFAULT] - ] = _UNDEFINED_DEFAULT, + default: _MetadataT = _UNDEFINED_DEFAULT, ) -> _MetadataT: """ The same method as :func:`~libcst.MetadataDependent.get_metadata` except @@ -127,12 +116,9 @@ class BaseMetadataProvider(MetadataDependent, Generic[_ProvidedMetadataT]): """ if key is type(self): if default is not _UNDEFINED_DEFAULT: - ret = self._computed.get(node, default) + return cast(_MetadataT, self._computed.get(node, default)) else: - ret = self._computed[node] - if isinstance(ret, LazyValue): - return ret() - return ret + return cast(_MetadataT, self._computed[node]) return super().get_metadata(key, node, default) diff --git a/libcst/metadata/expression_context_provider.py b/libcst/metadata/expression_context_provider.py index 955c14ad..beec959d 100644 --- a/libcst/metadata/expression_context_provider.py +++ b/libcst/metadata/expression_context_provider.py @@ -201,7 +201,7 @@ class ExpressionContextVisitor(cst.CSTVisitor): return False -class ExpressionContextProvider(BatchableMetadataProvider[ExpressionContext]): +class ExpressionContextProvider(BatchableMetadataProvider[Optional[ExpressionContext]]): """ Provides :class:`ExpressionContext` metadata (mimics the `expr_context `__ in ast) for the @@ -209,9 +209,9 @@ class ExpressionContextProvider(BatchableMetadataProvider[ExpressionContext]): :class:`~libcst.Attribute`, :class:`~libcst.Subscript`, :class:`~libcst.StarredElement` , :class:`~libcst.List`, :class:`~libcst.Tuple` and :class:`~libcst.Name`. - Note that a :class:`~libcst.Name` may not always have context because of the differences between + Not that a :class:`~libcst.Name` may not always has context because of the differences between ast and LibCST. E.g. :attr:`~libcst.Attribute.attr` is a :class:`~libcst.Name` in LibCST - but a str in ast. To honor ast implementation, we don't assign context to + but a str in ast. To honor ast implementation, we don't assignment context to :attr:`~libcst.Attribute.attr`. diff --git a/libcst/metadata/file_path_provider.py b/libcst/metadata/file_path_provider.py deleted file mode 100644 index 6ab01b5f..00000000 --- a/libcst/metadata/file_path_provider.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -from pathlib import Path -from typing import Any, List, Mapping, Optional - -import libcst as cst -from libcst.metadata.base_provider import BatchableMetadataProvider - - -class FilePathProvider(BatchableMetadataProvider[Path]): - """ - Provides the path to the current file on disk as metadata for the root - :class:`~libcst.Module` node. Requires a :class:`~libcst.metadata.FullRepoManager`. - The returned path will always be resolved to an absolute path using - :func:`pathlib.Path.resolve`. - - Example usage: - - .. code:: python - - class CustomVisitor(CSTVisitor): - METADATA_DEPENDENCIES = [FilePathProvider] - - path: pathlib.Path - - def visit_Module(self, node: libcst.Module) -> None: - self.path = self.get_metadata(FilePathProvider, node) - - .. 
code:: - - >>> mgr = FullRepoManager(".", {"libcst/_types.py"}, {FilePathProvider}) - >>> wrapper = mgr.get_metadata_wrapper_for_path("libcst/_types.py") - >>> fqnames = wrapper.resolve(FilePathProvider) - >>> {type(k): v for k, v in wrapper.resolve(FilePathProvider).items()} - {: PosixPath('/home/user/libcst/_types.py')} - - """ - - @classmethod - def gen_cache( - cls, root_path: Path, paths: List[str], **kwargs: Any - ) -> Mapping[str, Path]: - cache = {path: (root_path / path).resolve() for path in paths} - return cache - - def __init__(self, cache: Path) -> None: - super().__init__(cache) - self.path: Path = cache - - def visit_Module(self, node: cst.Module) -> Optional[bool]: - self.set_metadata(node, self.path) - return False diff --git a/libcst/metadata/full_repo_manager.py b/libcst/metadata/full_repo_manager.py index ab6430d8..2a05475e 100644 --- a/libcst/metadata/full_repo_manager.py +++ b/libcst/metadata/full_repo_manager.py @@ -8,7 +8,6 @@ from pathlib import Path from typing import Collection, Dict, List, Mapping, TYPE_CHECKING import libcst as cst -from libcst._types import StrPath from libcst.metadata.wrapper import MetadataWrapper if TYPE_CHECKING: @@ -18,11 +17,10 @@ if TYPE_CHECKING: class FullRepoManager: def __init__( self, - repo_root_dir: StrPath, + repo_root_dir: str, paths: Collection[str], providers: Collection["ProviderT"], timeout: int = 5, - use_pyproject_toml: bool = False, ) -> None: """ Given project root directory with pyre and watchman setup, :class:`~libcst.metadata.FullRepoManager` @@ -31,15 +29,14 @@ class FullRepoManager: :param paths: a collection of paths to access full repository data. :param providers: a collection of metadata provider classes require accessing full repository data, currently supports - :class:`~libcst.metadata.TypeInferenceProvider` and - :class:`~libcst.metadata.FullyQualifiedNameProvider`. + :class:`~libcst.metadata.TypeInferenceProvider` and + :class:`~libcst.metadata.FullyQualifiedNameProvider`. :param timeout: number of seconds. Raises `TimeoutExpired `_ when timeout. """ self.root_path: Path = Path(repo_root_dir) self._cache: Dict["ProviderT", Mapping[str, object]] = {} self._timeout = timeout - self._use_pyproject_toml = use_pyproject_toml self._providers = providers self._paths: List[str] = list(paths) @@ -67,10 +64,7 @@ class FullRepoManager: handler = provider.gen_cache if handler: cache[provider] = handler( - self.root_path, - self._paths, - timeout=self._timeout, - use_pyproject_toml=self._use_pyproject_toml, + self.root_path, self._paths, self._timeout ) self._cache = cache @@ -85,7 +79,7 @@ class FullRepoManager: MetadataWrapper(module, cache=manager.get_cache_for_path("a.py")) """ if path not in self._paths: - raise ValueError( + raise Exception( "The path needs to be in paths parameter when constructing FullRepoManager for efficient batch processing." ) # Make sure that the cache is available to us. If the user called diff --git a/libcst/metadata/name_provider.py b/libcst/metadata/name_provider.py index 7de76eb5..174aff73 100644 --- a/libcst/metadata/name_provider.py +++ b/libcst/metadata/name_provider.py @@ -4,12 +4,12 @@ # LICENSE file in the root directory of this source tree. 
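To make the FullRepoManager changes above concrete (positional `gen_cache` arguments and the requirement that a path be registered with the manager up front), here is a rough usage sketch; the temporary package layout is invented purely for illustration.

from pathlib import Path
from tempfile import TemporaryDirectory

from libcst.metadata import FullRepoManager, FullyQualifiedNameProvider

with TemporaryDirectory() as root:
    # Create a tiny package so the manager has a real file to read.
    (Path(root) / "pkg").mkdir()
    (Path(root) / "pkg" / "mod.py").write_text("x = 1\n")

    manager = FullRepoManager(root, ["pkg/mod.py"], [FullyQualifiedNameProvider])
    # resolve_cache() calls each provider's gen_cache(root_path, paths, timeout).
    manager.resolve_cache()

    # The path must be one of the paths passed to the constructor; otherwise
    # get_cache_for_path raises.
    wrapper = manager.get_metadata_wrapper_for_path("pkg/mod.py")
    fqnames = wrapper.resolve(FullyQualifiedNameProvider)
    print({qn.name for names in fqnames.values() for qn in names})  # includes "pkg.mod" and "pkg.mod.x"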
import dataclasses +import re from pathlib import Path -from typing import Any, Collection, List, Mapping, Optional, Union +from typing import Collection, List, Mapping, Optional, Pattern, Union import libcst as cst -from libcst._metadata_dependent import LazyValue, MetadataDependent -from libcst.helpers.module import calculate_module_and_package, ModuleNameAndPackage +from libcst._metadata_dependent import MetadataDependent from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.scope_provider import ( QualifiedName, @@ -78,15 +78,20 @@ class QualifiedNameVisitor(cst.CSTVisitor): def on_visit(self, node: cst.CSTNode) -> bool: scope = self.provider.get_metadata(ScopeProvider, node, None) if scope: - self.provider.set_metadata( - node, LazyValue(lambda: scope.get_qualified_names_for(node)) - ) + self.provider.set_metadata(node, scope.get_qualified_names_for(node)) else: self.provider.set_metadata(node, set()) super().on_visit(node) return True +DOT_PY: Pattern[str] = re.compile(r"(__init__)?\.py$") + + +def _module_name(path: str) -> Optional[str]: + return DOT_PY.sub("", path).replace("/", ".").rstrip(".") + + class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedName]]): """ Provide fully qualified names for CST nodes. Like :class:`QualifiedNameProvider`, @@ -112,28 +117,17 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN @classmethod def gen_cache( - cls, - root_path: Path, - paths: List[str], - *, - use_pyproject_toml: bool = False, - **kwargs: Any, - ) -> Mapping[str, ModuleNameAndPackage]: - cache = { - path: calculate_module_and_package( - root_path, path, use_pyproject_toml=use_pyproject_toml - ) - for path in paths - } + cls, root_path: Path, paths: List[str], timeout: Optional[int] = None + ) -> Mapping[str, object]: + cache = {path: _module_name(path) for path in paths} return cache - def __init__(self, cache: ModuleNameAndPackage) -> None: + def __init__(self, cache: str) -> None: super().__init__(cache) - self.module_name: str = cache.name - self.package_name: str = cache.package + self.module_name: str = cache def visit_Module(self, node: cst.Module) -> bool: - visitor = FullyQualifiedNameVisitor(self, self.module_name, self.package_name) + visitor = FullyQualifiedNameVisitor(self, self.module_name) node.visit(visitor) self.set_metadata( node, @@ -144,25 +138,20 @@ class FullyQualifiedNameProvider(BatchableMetadataProvider[Collection[QualifiedN class FullyQualifiedNameVisitor(cst.CSTVisitor): @staticmethod - def _fully_qualify_local(module_name: str, package_name: str, name: str) -> str: - abs_name = name.lstrip(".") - num_dots = len(name) - len(abs_name) - # handle relative import - if num_dots > 0: - name = abs_name - # see importlib._bootstrap._resolve_name - # https://github.com/python/cpython/blob/3.10/Lib/importlib/_bootstrap.py#L902 - bits = package_name.rsplit(".", num_dots - 1) - if len(bits) < num_dots: - raise ImportError("attempted relative import beyond top-level package") - module_name = bits[0] + def _fully_qualify_local(module_name: str, qname: QualifiedName) -> str: + name = qname.name + if not name.startswith("."): + # not a relative import + return f"{module_name}.{name}" - return f"{module_name}.{name}" + # relative import + name = name.lstrip(".") + parts_to_strip = len(qname.name) - len(name) + target_module = ".".join(module_name.split(".")[: -1 * parts_to_strip]) + return f"{target_module}.{name}" @staticmethod - def _fully_qualify( - module_name: str, 
package_name: str, qname: QualifiedName - ) -> QualifiedName: + def _fully_qualify(module_name: str, qname: QualifiedName) -> QualifiedName: if qname.source == QualifiedNameSource.BUILTIN: # builtins are already fully qualified return qname @@ -170,16 +159,11 @@ class FullyQualifiedNameVisitor(cst.CSTVisitor): if qname.source == QualifiedNameSource.IMPORT and not name.startswith("."): # non-relative imports are already fully qualified return qname - new_name = FullyQualifiedNameVisitor._fully_qualify_local( - module_name, package_name, qname.name - ) + new_name = FullyQualifiedNameVisitor._fully_qualify_local(module_name, qname) return dataclasses.replace(qname, name=new_name) - def __init__( - self, provider: FullyQualifiedNameProvider, module_name: str, package_name: str - ) -> None: + def __init__(self, provider: FullyQualifiedNameProvider, module_name: str) -> None: self.module_name = module_name - self.package_name = package_name self.provider = provider def on_visit(self, node: cst.CSTNode) -> bool: @@ -188,9 +172,7 @@ class FullyQualifiedNameVisitor(cst.CSTVisitor): self.provider.set_metadata( node, { - FullyQualifiedNameVisitor._fully_qualify( - self.module_name, self.package_name, qname - ) + FullyQualifiedNameVisitor._fully_qualify(self.module_name, qname) for qname in qnames }, ) diff --git a/libcst/metadata/scope_provider.py b/libcst/metadata/scope_provider.py index 909a55b2..0a64c5a8 100644 --- a/libcst/metadata/scope_provider.py +++ b/libcst/metadata/scope_provider.py @@ -7,7 +7,7 @@ import abc import builtins from collections import defaultdict -from contextlib import contextmanager, ExitStack +from contextlib import contextmanager from dataclasses import dataclass from enum import auto, Enum from typing import ( @@ -51,10 +51,6 @@ _ASSIGNMENT_LIKE_NODES = ( cst.Nonlocal, cst.Parameters, cst.WithItem, - cst.TypeVar, - cst.TypeAlias, - cst.TypeVarTuple, - cst.ParamSpec, ) @@ -120,7 +116,7 @@ class Access: self.__assignments.add(assignment) def record_assignments(self, name: str) -> None: - assignments = self.scope._resolve_scope_for_access(name, self.scope) + assignments = self.scope[name] # filter out assignments that happened later than this access previous_assignments = { assignment @@ -128,9 +124,7 @@ class Access: if assignment.scope != self.scope or assignment._index < self.__index } if not previous_assignments and assignments and self.scope.parent != self.scope: - previous_assignments = self.scope.parent._resolve_scope_for_access( - name, self.scope - ) + previous_assignments = self.scope.parent[name] self.__assignments |= previous_assignments @@ -199,7 +193,8 @@ class BaseAssignment(abc.ABC): return -1 @abc.abstractmethod - def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: ... + def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: + ... 
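Stepping back to the name_provider.py hunks just above, the standalone sketch below replays the `DOT_PY` path-to-module-name rewrite and the relative-import rule implemented by `_fully_qualify_local`; the sample paths and names are arbitrary examples, not values used anywhere in this diff.

import re
from typing import Pattern

# Same transformation as the _module_name helper above.
DOT_PY: Pattern[str] = re.compile(r"(__init__)?\.py$")


def module_name_for_path(path: str) -> str:
    return DOT_PY.sub("", path).replace("/", ".").rstrip(".")


print(module_name_for_path("a/b/c.py"))         # a.b.c
print(module_name_for_path("a/b/__init__.py"))  # a.b

# _fully_qualify_local keeps non-relative names under the module name, and for
# relative names strips one trailing component of the module name per leading
# dot beyond the first:
#   ("some.test.module", "foo")   -> "some.test.module.foo"
#   ("some.test.module", ".foo")  -> "some.test.foo"
#   ("some.test.module", "..bar") -> "some.bar"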
class Assignment(BaseAssignment): @@ -222,16 +217,24 @@ class Assignment(BaseAssignment): return self.__index def get_qualified_names_for(self, full_name: str) -> Set[QualifiedName]: - return { - QualifiedName( - ( - f"{self.scope._name_prefix}.{full_name}" - if self.scope._name_prefix - else full_name - ), - QualifiedNameSource.LOCAL, - ) - } + scope = self.scope + name_prefixes = [] + while scope: + if isinstance(scope, ClassScope): + name_prefixes.append(scope.name) + elif isinstance(scope, FunctionScope): + name_prefixes.append(f"{scope.name}.<locals>") + elif isinstance(scope, ComprehensionScope): + name_prefixes.append("<comprehension>") + elif not isinstance(scope, (GlobalScope, BuiltinScope)): + raise Exception(f"Unexpected Scope: {scope}") + + scope = scope.parent if scope.parent != scope else None + + parts = [*reversed(name_prefixes)] + if full_name: + parts.append(full_name) + return {QualifiedName(".".join(parts), QualifiedNameSource.LOCAL)} # even though we don't override the constructor. @@ -301,17 +304,12 @@ class ImportAssignment(Assignment): if eval_alias is not None: as_name = eval_alias if full_name.startswith(as_name): - remaining_name = full_name.split(as_name, 1)[1] - if remaining_name and not remaining_name.startswith("."): - continue - remaining_name = remaining_name.lstrip(".") + remaining_name = full_name.split(as_name, 1)[1].lstrip(".") results.add( QualifiedName( - ( - f"{real_name}.{remaining_name}" - if remaining_name - else real_name - ), + f"{real_name}.{remaining_name}" + if remaining_name + else real_name, QualifiedNameSource.IMPORT, ) ) @@ -333,7 +331,7 @@ class Assignments: def __getitem__(self, node: Union[str, cst.CSTNode]) -> Collection[BaseAssignment]: """Get assignments given a name str or :class:`~libcst.CSTNode` by ``scope.assignments[node]``""" - name = get_full_name_for_node(node) + name = _NameUtil.get_name_for(node) return set(self._assignments[name]) if name in self._assignments else set() def __contains__(self, node: Union[str, cst.CSTNode]) -> bool: @@ -355,7 +353,7 @@ class Accesses: def __getitem__(self, node: Union[str, cst.CSTNode]) -> Collection[Access]: """Get accesses given a name str or :class:`~libcst.CSTNode` by ``scope.accesses[node]``""" - name = get_full_name_for_node(node) + name = _NameUtil.get_name_for(node) return self._accesses[name] if name in self._accesses else set() def __contains__(self, node: Union[str, cst.CSTNode]) -> bool: @@ -363,6 +361,23 @@ class Accesses: return len(self[node]) > 0 +class _NameUtil: + @staticmethod + def get_name_for(node: Union[str, cst.CSTNode]) -> Optional[str]: + """A helper function to retrieve simple name str from a CSTNode or str""" + if isinstance(node, cst.Name): + return node.value + elif isinstance(node, str): + return node + elif isinstance(node, cst.Call): + return _NameUtil.get_name_for(node.func) + elif isinstance(node, cst.Subscript): + return _NameUtil.get_name_for(node.value) + elif isinstance(node, (cst.FunctionDef, cst.ClassDef)): + return _NameUtil.get_name_for(node.name) + return None + + class Scope(abc.ABC): """ Base class of all scope classes. Scope object stores assignments from imports, @@ -388,20 +403,16 @@ class Scope(abc.ABC): #: Refers to the GlobalScope.
globals: "GlobalScope" _assignments: MutableMapping[str, Set[BaseAssignment]] + _accesses: MutableMapping[str, Set[Access]] _assignment_count: int - _accesses_by_name: MutableMapping[str, Set[Access]] - _accesses_by_node: MutableMapping[cst.CSTNode, Set[Access]] - _name_prefix: str def __init__(self, parent: "Scope") -> None: super().__init__() self.parent = parent self.globals = parent.globals self._assignments = defaultdict(set) + self._accesses = defaultdict(set) self._assignment_count = 0 - self._accesses_by_name = defaultdict(set) - self._accesses_by_node = defaultdict(set) - self._name_prefix = "" def record_assignment(self, name: str, node: cst.CSTNode) -> None: target = self._find_assignment_target(name) @@ -428,42 +439,26 @@ class Scope(abc.ABC): def _find_assignment_target(self, name: str) -> "Scope": return self + def _find_assignment_target_parent(self, name: str) -> "Scope": + return self + def record_access(self, name: str, access: Access) -> None: - self._accesses_by_name[name].add(access) - self._accesses_by_node[access.node].add(access) + self._accesses[name].add(access) - def _is_visible_from_children(self, from_scope: "Scope") -> bool: - """Returns if the assignments in this scope can be accessed from children. + def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: + """Overridden by ClassScope to hide it's assignments from child scopes.""" + return self[name] - This is normally True, except for class scopes:: - - def outer_fn(): - v = ... # outer_fn's declaration - class InnerCls: - v = ... # shadows outer_fn's declaration - class InnerInnerCls: - v = ... # shadows all previous declarations of v - def inner_fn(): - nonlocal v - v = ... # this refers to outer_fn's declaration - # and not to any of the inner classes' as those are - # hidden from their children. - """ - return True - - def _next_visible_parent( - self, from_scope: "Scope", first: Optional["Scope"] = None - ) -> "Scope": - parent = first if first is not None else self.parent - while not parent._is_visible_from_children(from_scope): - parent = parent.parent - return parent + def _contains_in_self_or_parent(self, name: str) -> bool: + """Overridden by ClassScope to hide it's assignments from child scopes.""" + return name in self @abc.abstractmethod def __contains__(self, name: str) -> bool: """Check if the name str exist in current scope by ``name in scope``.""" ... + @abc.abstractmethod def __getitem__(self, name: str) -> Set[BaseAssignment]: """ Get assignments given a name str by ``scope[name]``. @@ -501,21 +496,18 @@ class Scope(abc.ABC): defined a given name by the time a piece of code is executed. For the above example, value would resolve to a set of both assignments. """ - return self._resolve_scope_for_access(name, self) - - @abc.abstractmethod - def _resolve_scope_for_access( - self, name: str, from_scope: "Scope" - ) -> Set[BaseAssignment]: ... + ... def __hash__(self) -> int: return id(self) @abc.abstractmethod - def record_global_overwrite(self, name: str) -> None: ... + def record_global_overwrite(self, name: str) -> None: + ... @abc.abstractmethod - def record_nonlocal_overwrite(self, name: str) -> None: ... + def record_nonlocal_overwrite(self, name: str) -> None: + ... def get_qualified_names_for( self, node: Union[str, cst.CSTNode] @@ -548,41 +540,26 @@ class Scope(abc.ABC): considering it could be a complex type annotation in the string which is hard to resolve, e.g. ``List[Union[int, str]]``. 
""" - # if this node is an access we know the assignment and we can use that name - node_accesses = ( - self._accesses_by_node.get(node) if isinstance(node, cst.CSTNode) else None - ) - if node_accesses: - return { - qname - for access in node_accesses - for referent in access.referents - for qname in referent.get_qualified_names_for(referent.name) - } - + results = set() full_name = get_full_name_for_node(node) if full_name is None: - return set() - + return results assignments = set() - prefix = full_name - while prefix: + parts = full_name.split(".") + for i in range(len(parts), 0, -1): + prefix = ".".join(parts[:i]) if prefix in self: assignments = self[prefix] break - idx = prefix.rfind(".") - prefix = None if idx == -1 else prefix[:idx] - - if not isinstance(node, str): - for assignment in assignments: - if isinstance(assignment, Assignment) and _is_assignment( - node, assignment.node - ): - return assignment.get_qualified_names_for(full_name) - - results = set() for assignment in assignments: - results |= assignment.get_qualified_names_for(full_name) + names = assignment.get_qualified_names_for(full_name) + if ( + isinstance(assignment, Assignment) + and not isinstance(node, str) + and _is_assignment(node, assignment.node) + ): + return names + results |= names return results @property @@ -593,7 +570,7 @@ class Scope(abc.ABC): @property def accesses(self) -> Accesses: """Return an :class:`~libcst.metadata.Accesses` contains all accesses in current scope.""" - return Accesses(self._accesses_by_name) + return Accesses(self._accesses) class BuiltinScope(Scope): @@ -608,9 +585,7 @@ class BuiltinScope(Scope): def __contains__(self, name: str) -> bool: return hasattr(builtins, name) - def _resolve_scope_for_access( - self, name: str, from_scope: "Scope" - ) -> Set[BaseAssignment]: + def __getitem__(self, name: str) -> Set[BaseAssignment]: if name in self._assignments: return self._assignments[name] if hasattr(builtins, name): @@ -642,16 +617,13 @@ class GlobalScope(Scope): def __contains__(self, name: str) -> bool: if name in self._assignments: return len(self._assignments[name]) > 0 - return name in self._next_visible_parent(self) + return self.parent._contains_in_self_or_parent(name) - def _resolve_scope_for_access( - self, name: str, from_scope: "Scope" - ) -> Set[BaseAssignment]: + def __getitem__(self, name: str) -> Set[BaseAssignment]: if name in self._assignments: return self._assignments[name] - - parent = self._next_visible_parent(from_scope) - return parent[name] + else: + return self.parent._getitem_from_self_or_parent(name) def record_global_overwrite(self, name: str) -> None: pass @@ -676,8 +648,6 @@ class LocalScope(Scope, abc.ABC): self.name = name self.node = node self._scope_overwrites = {} - # pyre-fixme[4]: Attribute `_name_prefix` of class `LocalScope` has type `str` but no type is specified. 
- self._name_prefix = self._make_name_prefix() def record_global_overwrite(self, name: str) -> None: self._scope_overwrites[name] = self.globals @@ -687,8 +657,7 @@ class LocalScope(Scope, abc.ABC): def _find_assignment_target(self, name: str) -> "Scope": if name in self._scope_overwrites: - scope = self._scope_overwrites[name] - return self._next_visible_parent(self, scope)._find_assignment_target(name) + return self._scope_overwrites[name]._find_assignment_target_parent(name) else: return super()._find_assignment_target(name) @@ -697,26 +666,15 @@ class LocalScope(Scope, abc.ABC): return name in self._scope_overwrites[name] if name in self._assignments: return len(self._assignments[name]) > 0 - return name in self._next_visible_parent(self) + return self.parent._contains_in_self_or_parent(name) - def _resolve_scope_for_access( - self, name: str, from_scope: "Scope" - ) -> Set[BaseAssignment]: + def __getitem__(self, name: str) -> Set[BaseAssignment]: if name in self._scope_overwrites: - scope = self._scope_overwrites[name] - return self._next_visible_parent( - from_scope, scope - )._resolve_scope_for_access(name, from_scope) + return self._scope_overwrites[name]._getitem_from_self_or_parent(name) if name in self._assignments: return self._assignments[name] else: - return self._next_visible_parent(from_scope)._resolve_scope_for_access( - name, from_scope - ) - - def _make_name_prefix(self) -> str: - # filter falsey strings out - return ".".join(filter(None, [self.parent._name_prefix, self.name, ""])) + return self.parent._getitem_from_self_or_parent(name) # even though we don't override the constructor. @@ -734,12 +692,35 @@ class ClassScope(LocalScope): When a class is defined, it creates a ClassScope. """ - def _is_visible_from_children(self, from_scope: "Scope") -> bool: - return from_scope.parent is self and isinstance(from_scope, AnnotationScope) + def _find_assignment_target_parent(self, name: str) -> "Scope": + """ + Forward the assignment to parent. - def _make_name_prefix(self) -> str: - # filter falsey strings out - return ".".join(filter(None, [self.parent._name_prefix, self.name])) + def outer_fn(): + v = ... # outer_fn's declaration + class InnerCls: + v = ... # shadows outer_fn's declaration + def inner_fn(): + nonlocal v + v = ... # this should actually refer to outer_fn's declaration + # and not to InnerCls's, because InnerCls's scope is + # hidden from its children. + + """ + return self.parent._find_assignment_target_parent(name) + + def _getitem_from_self_or_parent(self, name: str) -> Set[BaseAssignment]: + """ + Class variables are only accessible using ClassName.attribute, cls.attribute, or + self.attribute in child scopes. They cannot be accessed with their bare names. + """ + return self.parent._getitem_from_self_or_parent(name) + + def _contains_in_self_or_parent(self, name: str) -> bool: + """ + See :meth:`_getitem_from_self_or_parent` + """ + return self.parent._contains_in_self_or_parent(name) # even though we don't override the constructor. @@ -755,30 +736,14 @@ class ComprehensionScope(LocalScope): # TODO: Assignment expressions (Python 3.8) will complicate ComprehensionScopes, # and will require us to handle such assignments as non-local. # https://www.python.org/dev/peps/pep-0572/#scope-of-the-target - - def _make_name_prefix(self) -> str: - # filter falsey strings out - return ".".join(filter(None, [self.parent._name_prefix, ""])) - - -class AnnotationScope(LocalScope): - """ - Scopes used for type aliases and type parameters as defined by PEP-695. 
- - These scopes are created for type parameters using the special syntax, as well as - type aliases. See https://peps.python.org/pep-0695/#scoping-behavior for more. - """ - - def _make_name_prefix(self) -> str: - # these scopes are transparent for the purposes of qualified names - return self.parent._name_prefix + pass # Generates dotted names from an Attribute or Name node: # Attribute(value=Name(value="a"), attr=Name(value="b")) -> ("a.b", "a") # each string has the corresponding CSTNode attached to it def _gen_dotted_names( - node: Union[cst.Attribute, cst.Name], + node: Union[cst.Attribute, cst.Name] ) -> Iterator[Tuple[str, Union[cst.Attribute, cst.Name]]]: if isinstance(node, cst.Name): yield node.value, node @@ -841,7 +806,6 @@ class DeferredAccess: class ScopeVisitor(cst.CSTVisitor): # since it's probably not useful. That can makes this visitor cleaner. def __init__(self, provider: "ScopeProvider") -> None: - super().__init__() self.provider: ScopeProvider = provider self.scope: Scope = GlobalScope() self.__deferred_accesses: List[DeferredAccess] = [] @@ -1012,22 +976,15 @@ class ScopeVisitor(cst.CSTVisitor): self.scope.record_assignment(node.name.value, node) self.provider.set_metadata(node.name, self.scope) - with ExitStack() as stack: - if node.type_parameters: - stack.enter_context(self._new_scope(AnnotationScope, node, None)) - node.type_parameters.visit(self) + with self._new_scope(FunctionScope, node, get_full_name_for_node(node.name)): + node.params.visit(self) + node.body.visit(self) - with self._new_scope( - FunctionScope, node, get_full_name_for_node(node.name) - ): - node.params.visit(self) - node.body.visit(self) - - for decorator in node.decorators: - decorator.visit(self) - returns = node.returns - if returns: - returns.visit(self) + for decorator in node.decorators: + decorator.visit(self) + returns = node.returns + if returns: + returns.visit(self) return False @@ -1059,20 +1016,14 @@ class ScopeVisitor(cst.CSTVisitor): self.provider.set_metadata(node.name, self.scope) for decorator in node.decorators: decorator.visit(self) + for base in node.bases: + base.visit(self) + for keyword in node.keywords: + keyword.visit(self) - with ExitStack() as stack: - if node.type_parameters: - stack.enter_context(self._new_scope(AnnotationScope, node, None)) - node.type_parameters.visit(self) - - for base in node.bases: - base.visit(self) - for keyword in node.keywords: - keyword.visit(self) - - with self._new_scope(ClassScope, node, get_full_name_for_node(node.name)): - for statement in node.body.body: - statement.visit(self) + with self._new_scope(ClassScope, node, get_full_name_for_node(node.name)): + for statement in node.body.body: + statement.visit(self) return False def visit_ClassDef_bases(self, node: cst.ClassDef) -> None: @@ -1196,7 +1147,7 @@ class ScopeVisitor(cst.CSTVisitor): access.scope.record_access(name, access) for (scope, name), accesses in scope_name_accesses.items(): - for assignment in scope._resolve_scope_for_access(name, scope): + for assignment in scope[name]: assignment.record_accesses(accesses) self.__deferred_accesses = [] @@ -1207,32 +1158,6 @@ class ScopeVisitor(cst.CSTVisitor): self.scope._assignment_count += 1 super().on_leave(original_node) - def visit_TypeAlias(self, node: cst.TypeAlias) -> Optional[bool]: - self.scope.record_assignment(node.name.value, node) - - with self._new_scope(AnnotationScope, node, None): - if node.type_parameters is not None: - node.type_parameters.visit(self) - node.value.visit(self) - - return False - - def 
visit_TypeVar(self, node: cst.TypeVar) -> Optional[bool]: - self.scope.record_assignment(node.name.value, node) - - if node.bound is not None: - node.bound.visit(self) - - return False - - def visit_TypeVarTuple(self, node: cst.TypeVarTuple) -> Optional[bool]: - self.scope.record_assignment(node.name.value, node) - return False - - def visit_ParamSpec(self, node: cst.ParamSpec) -> Optional[bool]: - self.scope.record_assignment(node.name.value, node) - return False - class ScopeProvider(BatchableMetadataProvider[Optional[Scope]]): """ diff --git a/libcst/metadata/tests/test_accessor_provider.py b/libcst/metadata/tests/test_accessor_provider.py deleted file mode 100644 index 6ccfad5e..00000000 --- a/libcst/metadata/tests/test_accessor_provider.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import dataclasses - -from textwrap import dedent - -import libcst as cst -from libcst.metadata import AccessorProvider, MetadataWrapper -from libcst.testing.utils import data_provider, UnitTest - - -class DependentVisitor(cst.CSTVisitor): - METADATA_DEPENDENCIES = (AccessorProvider,) - - def __init__(self, *, test: UnitTest) -> None: - self.test = test - - def on_visit(self, node: cst.CSTNode) -> bool: - for f in dataclasses.fields(node): - child = getattr(node, f.name) - if type(child) is cst.CSTNode: - accessor = self.get_metadata(AccessorProvider, child) - self.test.assertEqual(accessor, f.name) - - return True - - -class AccessorProviderTest(UnitTest): - @data_provider( - ( - ( - """ - foo = 'toplevel' - fn1(foo) - fn2(foo) - def fn_def(): - foo = 'shadow' - fn3(foo) - """, - ), - ( - """ - global_var = None - @cls_attr - class Cls(cls_attr, kwarg=cls_attr): - cls_attr = 5 - def f(): - pass - """, - ), - ( - """ - iterator = None - condition = None - [elt for target in iterator if condition] - {elt for target in iterator if condition} - {elt: target for target in iterator if condition} - (elt for target in iterator if condition) - """, - ), - ) - ) - def test_accessor_provier(self, code: str) -> None: - wrapper = MetadataWrapper(cst.parse_module(dedent(code))) - wrapper.visit(DependentVisitor(test=self)) diff --git a/libcst/metadata/tests/test_base_provider.py b/libcst/metadata/tests/test_base_provider.py index 26ebde70..0bf4ca51 100644 --- a/libcst/metadata/tests/test_base_provider.py +++ b/libcst/metadata/tests/test_base_provider.py @@ -7,7 +7,6 @@ from typing import cast import libcst as cst from libcst import parse_module -from libcst._metadata_dependent import LazyValue from libcst.metadata import ( BatchableMetadataProvider, MetadataWrapper, @@ -76,63 +75,3 @@ class BaseMetadataProviderTest(UnitTest): self.assertEqual(metadata[SimpleProvider][pass_], 1) self.assertEqual(metadata[SimpleProvider][return_], 2) self.assertEqual(metadata[SimpleProvider][pass_2], 1) - - def test_lazy_visitor_provider(self) -> None: - class SimpleLazyProvider(VisitorMetadataProvider[int]): - """ - Sets metadata on every node to a callable that returns 1. 
- """ - - def on_visit(self, node: cst.CSTNode) -> bool: - self.set_metadata(node, LazyValue(lambda: 1)) - return True - - wrapper = MetadataWrapper(parse_module("pass; return")) - module = wrapper.module - pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] - return_ = cast(cst.SimpleStatementLine, module.body[0]).body[1] - - provider = SimpleLazyProvider() - metadata = provider._gen(wrapper) - - # Check access on provider - self.assertEqual(provider.get_metadata(SimpleLazyProvider, module), 1) - self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_), 1) - self.assertEqual(provider.get_metadata(SimpleLazyProvider, return_), 1) - - # Check returned mapping - self.assertTrue(isinstance(metadata[module], LazyValue)) - self.assertTrue(isinstance(metadata[pass_], LazyValue)) - self.assertTrue(isinstance(metadata[return_], LazyValue)) - - def testlazy_batchable_provider(self) -> None: - class SimpleLazyProvider(BatchableMetadataProvider[int]): - """ - Sets metadata on every pass node to a callable that returns 1, - and every return node to a callable that returns 2. - """ - - def visit_Pass(self, node: cst.Pass) -> None: - self.set_metadata(node, LazyValue(lambda: 1)) - - def visit_Return(self, node: cst.Return) -> None: - self.set_metadata(node, LazyValue(lambda: 2)) - - wrapper = MetadataWrapper(parse_module("pass; return; pass")) - module = wrapper.module - pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] - return_ = cast(cst.SimpleStatementLine, module.body[0]).body[1] - pass_2 = cast(cst.SimpleStatementLine, module.body[0]).body[2] - - provider = SimpleLazyProvider() - metadata = _gen_batchable(wrapper, [provider]) - - # Check access on provider - self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_), 1) - self.assertEqual(provider.get_metadata(SimpleLazyProvider, return_), 2) - self.assertEqual(provider.get_metadata(SimpleLazyProvider, pass_2), 1) - - # Check returned mapping - self.assertTrue(isinstance(metadata[SimpleLazyProvider][pass_], LazyValue)) - self.assertTrue(isinstance(metadata[SimpleLazyProvider][return_], LazyValue)) - self.assertTrue(isinstance(metadata[SimpleLazyProvider][pass_2], LazyValue)) diff --git a/libcst/metadata/tests/test_file_path_provider.py b/libcst/metadata/tests/test_file_path_provider.py deleted file mode 100644 index 2b0631f5..00000000 --- a/libcst/metadata/tests/test_file_path_provider.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
- -from pathlib import Path -from tempfile import TemporaryDirectory -from typing import Set - -import libcst -from libcst._visitors import CSTVisitor -from libcst.helpers.paths import chdir -from libcst.metadata import FilePathProvider, FullRepoManager, MetadataWrapper -from libcst.testing.utils import UnitTest - - -class FilePathProviderTest(UnitTest): - def setUp(self) -> None: - self.td = TemporaryDirectory() - self.tdp = Path(self.td.name).resolve() - self.addCleanup(self.td.cleanup) - - def test_provider_cache(self) -> None: - pkg = self.tdp / "pkg" - pkg.mkdir() - files = [Path(pkg / name) for name in ("file1.py", "file2.py", "file3.py")] - [file.write_text("print('hello')\n") for file in files] - - with self.subTest("absolute paths"): - repo_manager = FullRepoManager( - self.tdp, [f.as_posix() for f in files], {FilePathProvider} - ) - repo_manager.resolve_cache() - - expected = { - FilePathProvider: {f.as_posix(): f for f in files}, - } - self.assertDictEqual(expected, repo_manager.cache) - - with self.subTest("repo relative paths"): - repo_manager = FullRepoManager( - self.tdp, - [f.relative_to(self.tdp).as_posix() for f in files], - {FilePathProvider}, - ) - repo_manager.resolve_cache() - - expected = { - FilePathProvider: { - f.relative_to(self.tdp).as_posix(): f for f in files - }, - } - self.assertDictEqual(expected, repo_manager.cache) - - with self.subTest("dot relative paths"): - with chdir(self.tdp): - repo_manager = FullRepoManager( - ".", - [f.relative_to(self.tdp).as_posix() for f in files], - {FilePathProvider}, - ) - repo_manager.resolve_cache() - - expected = { - FilePathProvider: { - f.relative_to(self.tdp).as_posix(): f for f in files - }, - } - self.assertDictEqual(expected, repo_manager.cache) - - def test_visitor(self) -> None: - pkg = self.tdp / "pkg" - pkg.mkdir() - files = [Path(pkg / name) for name in ("file1.py", "file2.py", "file3.py")] - [file.write_text("print('hello')\n") for file in files] - - seen: Set[Path] = set() - - class FakeVisitor(CSTVisitor): - METADATA_DEPENDENCIES = [FilePathProvider] - - def visit_Module(self, node: libcst.Module) -> None: - seen.add(self.get_metadata(FilePathProvider, node)) - - with self.subTest("absolute paths"): - seen.clear() - repo_manager = FullRepoManager( - self.tdp, [f.as_posix() for f in files], {FilePathProvider} - ) - repo_manager.resolve_cache() - - for file in files: - module = libcst.parse_module(file.read_bytes()) - wrapper = MetadataWrapper( - module, cache=repo_manager.get_cache_for_path(file.as_posix()) - ) - wrapper.visit(FakeVisitor()) - - expected = set(files) - self.assertSetEqual(expected, seen) - - with self.subTest("repo relative paths"): - seen.clear() - repo_manager = FullRepoManager( - self.tdp, - [f.relative_to(self.tdp).as_posix() for f in files], - {FilePathProvider}, - ) - repo_manager.resolve_cache() - - for file in files: - module = libcst.parse_module(file.read_bytes()) - wrapper = MetadataWrapper( - module, - cache=repo_manager.get_cache_for_path( - file.relative_to(self.tdp).as_posix() - ), - ) - wrapper.visit(FakeVisitor()) - - expected = set(files) - self.assertSetEqual(expected, seen) - - with self.subTest("dot relative paths"): - with chdir(self.tdp): - seen.clear() - repo_manager = FullRepoManager( - ".", - [f.relative_to(self.tdp).as_posix() for f in files], - {FilePathProvider}, - ) - repo_manager.resolve_cache() - - for file in files: - module = libcst.parse_module(file.read_bytes()) - wrapper = MetadataWrapper( - module, - cache=repo_manager.get_cache_for_path( - 
file.relative_to(self.tdp).as_posix() - ), - ) - wrapper.visit(FakeVisitor()) - - expected = set(files) - self.assertSetEqual(expected, seen) diff --git a/libcst/metadata/tests/test_metadata_wrapper.py b/libcst/metadata/tests/test_metadata_wrapper.py index 9063a99a..ee61e14f 100644 --- a/libcst/metadata/tests/test_metadata_wrapper.py +++ b/libcst/metadata/tests/test_metadata_wrapper.py @@ -48,13 +48,9 @@ class MetadataWrapperTest(UnitTest): self.assertNotEqual(hash(mw1), hash(mw3)) self.assertNotEqual(hash(mw2), hash(mw3)) - @staticmethod - def ignore_args(*args: object, **kwargs: object) -> tuple[object, ...]: - return (args, kwargs) - def test_metadata_cache(self) -> None: class DummyMetadataProvider(BatchableMetadataProvider[None]): - gen_cache = self.ignore_args + gen_cache = tuple m = cst.parse_module("pass") mw = MetadataWrapper(m) @@ -64,7 +60,7 @@ class MetadataWrapperTest(UnitTest): mw.resolve(DummyMetadataProvider) class SimpleCacheMetadataProvider(BatchableMetadataProvider[object]): - gen_cache = self.ignore_args + gen_cache = tuple def __init__(self, cache: object) -> None: super().__init__(cache) diff --git a/libcst/metadata/tests/test_name_provider.py b/libcst/metadata/tests/test_name_provider.py index fbd3631a..30a6acd8 100644 --- a/libcst/metadata/tests/test_name_provider.py +++ b/libcst/metadata/tests/test_name_provider.py @@ -6,11 +6,10 @@ from pathlib import Path from tempfile import TemporaryDirectory from textwrap import dedent -from typing import Collection, Dict, Mapping, Optional, Set, Tuple +from typing import Collection, Mapping, Optional, Set, Tuple import libcst as cst from libcst import ensure_type -from libcst._nodes.base import CSTNode from libcst.metadata import ( FullyQualifiedNameProvider, MetadataWrapper, @@ -20,28 +19,14 @@ from libcst.metadata import ( ) from libcst.metadata.full_repo_manager import FullRepoManager from libcst.metadata.name_provider import FullyQualifiedNameVisitor -from libcst.testing.utils import data_provider, UnitTest - - -class QNameVisitor(cst.CSTVisitor): - METADATA_DEPENDENCIES = (QualifiedNameProvider,) - - def __init__(self) -> None: - self.qnames: Dict["CSTNode", Collection[QualifiedName]] = {} - - def on_visit(self, node: cst.CSTNode) -> bool: - qname = self.get_metadata(QualifiedNameProvider, node) - self.qnames[node] = qname - return True +from libcst.testing.utils import UnitTest def get_qualified_name_metadata_provider( module_str: str, ) -> Tuple[cst.Module, Mapping[cst.CSTNode, Collection[QualifiedName]]]: wrapper = MetadataWrapper(cst.parse_module(dedent(module_str))) - visitor = QNameVisitor() - wrapper.visit(visitor) - return wrapper.module, visitor.qnames + return wrapper.module, wrapper.resolve(QualifiedNameProvider) def get_qualified_names(module_str: str) -> Set[QualifiedName]: @@ -54,7 +39,7 @@ def get_fully_qualified_names(file_path: str, module_str: str) -> Set[QualifiedN cst.parse_module(dedent(module_str)), cache={ FullyQualifiedNameProvider: FullyQualifiedNameProvider.gen_cache( - Path(""), [file_path], timeout=None + Path(""), [file_path], None ).get(file_path, "") }, ) @@ -117,8 +102,7 @@ class QualifiedNameProviderTest(UnitTest): cls = ensure_type(m.body[1], cst.ClassDef) f = ensure_type(cls.body.body[0], cst.FunctionDef) self.assertEqual( - names[ensure_type(f.returns, cst.Annotation).annotation], - {QualifiedName("a.b.c", QualifiedNameSource.IMPORT)}, + names[ensure_type(f.returns, cst.Annotation).annotation], set() ) c_call = ensure_type( @@ -373,7 +357,7 @@ class QualifiedNameProviderTest(UnitTest): 
else: import f import a.b as f - + f() """ ) @@ -430,119 +414,68 @@ class QualifiedNameProviderTest(UnitTest): }, ) - def test_shadowed_assignments(self) -> None: - m, names = get_qualified_name_metadata_provider( - """ - from lib import a,b,c - a = a - class Test: - b = b - def func(): - c = c - """ - ) - - # pyre-fixme[53]: Captured variable `names` is not annotated. - def test_name(node: cst.CSTNode, qnames: Set[QualifiedName]) -> None: - name = ensure_type( - ensure_type(node, cst.SimpleStatementLine).body[0], cst.Assign - ).value - self.assertEqual(names[name], qnames) - - test_name(m.body[1], {QualifiedName("lib.a", QualifiedNameSource.IMPORT)}) - - cls = ensure_type(m.body[2], cst.ClassDef) - test_name( - cls.body.body[0], {QualifiedName("lib.b", QualifiedNameSource.IMPORT)} - ) - - func = ensure_type(m.body[3], cst.FunctionDef) - test_name( - func.body.body[0], {QualifiedName("lib.c", QualifiedNameSource.IMPORT)} - ) - class FullyQualifiedNameProviderTest(UnitTest): - @data_provider( - ( - # test module names - ("a/b/c.py", "", {"a.b.c": QualifiedNameSource.LOCAL}), - ("a/b.py", "", {"a.b": QualifiedNameSource.LOCAL}), - ("a.py", "", {"a": QualifiedNameSource.LOCAL}), - ("a/b/__init__.py", "", {"a.b": QualifiedNameSource.LOCAL}), - ("a/b/__main__.py", "", {"a.b": QualifiedNameSource.LOCAL}), - # test builtinxsx - ( - "test/module.py", - "int(None)", - { - "test.module": QualifiedNameSource.LOCAL, - "builtins.int": QualifiedNameSource.BUILTIN, - "builtins.None": QualifiedNameSource.BUILTIN, - }, - ), - # test imports - ( - "some/test/module.py", - """ - from a.b import c as d - from . import rel - from .lol import rel2 - from .. import thing as rel3 - d, rel, rel2, rel3 - """, - { - "some.test.module": QualifiedNameSource.LOCAL, - "a.b.c": QualifiedNameSource.IMPORT, - "some.test.rel": QualifiedNameSource.IMPORT, - "some.test.lol.rel2": QualifiedNameSource.IMPORT, - "some.thing": QualifiedNameSource.IMPORT, - }, - ), - # test more imports - ( - "some/test/module/__init__.py", - """ - from . import rel - from .lol import rel2 - rel, rel2 - """, - { - "some.test.module": QualifiedNameSource.LOCAL, - "some.test.module.rel": QualifiedNameSource.IMPORT, - "some.test.module.lol.rel2": QualifiedNameSource.IMPORT, - }, - ), - # test locals - ( - "some/test/module.py", - """ - class X: - a: X - """, - { - "some.test.module": QualifiedNameSource.LOCAL, - "some.test.module.X": QualifiedNameSource.LOCAL, - "some.test.module.X.a": QualifiedNameSource.LOCAL, - }, - ), + def test_builtins(self) -> None: + qnames = get_fully_qualified_names( + "test/module.py", + """ + int(None) + """, ) - ) - def test_qnames( - self, file: str, code: str, names: Dict[str, QualifiedNameSource] - ) -> None: - qnames = get_fully_qualified_names(file, code) - self.assertSetEqual( - set(names.keys()), + module_name = QualifiedName( + name="test.module", source=QualifiedNameSource.LOCAL + ) + self.assertIn(module_name, qnames) + qnames -= {module_name} + self.assertEqual( + {"builtins.int", "builtins.None"}, {qname.name for qname in qnames}, ) for qname in qnames: - self.assertEqual(qname.source, names[qname.name], msg=f"{qname}") + self.assertEqual(qname.source, QualifiedNameSource.BUILTIN, msg=f"{qname}") + + def test_imports(self) -> None: + qnames = get_fully_qualified_names( + "some/test/module.py", + """ + from a.b import c as d + from . import rel + from .lol import rel2 + from .. 
import thing as rel3 + d, rel, rel2, rel3 + """, + ) + module_name = QualifiedName( + name="some.test.module", source=QualifiedNameSource.LOCAL + ) + self.assertIn(module_name, qnames) + qnames -= {module_name} + self.assertEqual( + {"a.b.c", "some.test.rel", "some.test.lol.rel2", "some.thing"}, + {qname.name for qname in qnames}, + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.IMPORT, msg=f"{qname}") + + def test_locals(self) -> None: + qnames = get_fully_qualified_names( + "some/test/module.py", + """ + class X: + a: X + """, + ) + self.assertEqual( + {"some.test.module", "some.test.module.X", "some.test.module.X.a"}, + {qname.name for qname in qnames}, + ) + for qname in qnames: + self.assertEqual(qname.source, QualifiedNameSource.LOCAL, msg=f"{qname}") def test_local_qualification(self) -> None: - module_name = "some.test.module" - package_name = "some.test" - for name, expected in [ + base_module = "some.test.module" + for (name, expected) in [ (".foo", "some.test.foo"), ("..bar", "some.bar"), ("foo", "some.test.module.foo"), @@ -550,7 +483,8 @@ class FullyQualifiedNameProviderTest(UnitTest): with self.subTest(name=name): self.assertEqual( FullyQualifiedNameVisitor._fully_qualify_local( - module_name, package_name, name + base_module, + QualifiedName(name=name, source=QualifiedNameSource.LOCAL), ), expected, ) @@ -559,14 +493,11 @@ class FullyQualifiedNameProviderTest(UnitTest): class FullyQualifiedNameIntegrationTest(UnitTest): def test_with_full_repo_manager(self) -> None: with TemporaryDirectory() as dir: - root = Path(dir) - file_path = root / "pkg/mod.py" - file_path.parent.mkdir() - file_path.touch() - - file_path_str = file_path.as_posix() - mgr = FullRepoManager(root, [file_path_str], [FullyQualifiedNameProvider]) - wrapper = mgr.get_metadata_wrapper_for_path(file_path_str) + fname = "pkg/mod.py" + (Path(dir) / "pkg").mkdir() + (Path(dir) / fname).touch() + mgr = FullRepoManager(dir, [fname], [FullyQualifiedNameProvider]) + wrapper = mgr.get_metadata_wrapper_for_path(fname) fqnames = wrapper.resolve(FullyQualifiedNameProvider) (mod, names) = next(iter(fqnames.items())) self.assertIsInstance(mod, cst.Module) diff --git a/libcst/metadata/tests/test_position_provider.py b/libcst/metadata/tests/test_position_provider.py index 14cecec7..c479837e 100644 --- a/libcst/metadata/tests/test_position_provider.py +++ b/libcst/metadata/tests/test_position_provider.py @@ -83,53 +83,6 @@ class PositionProviderTest(UnitTest): wrapper = MetadataWrapper(parse_module("pass")) wrapper.visit_batched([ABatchable()]) - def test_match_statement_position_metadata(self) -> None: - test = self - - class MatchPositionVisitor(CSTVisitor): - METADATA_DEPENDENCIES = (PositionProvider,) - - def visit_Match(self, node: cst.Match) -> None: - test.assertEqual( - self.get_metadata(PositionProvider, node), - CodeRange((2, 0), (5, 16)), - ) - - def visit_MatchCase(self, node: cst.MatchCase) -> None: - if ( - isinstance(node.pattern, cst.MatchAs) - and node.pattern.name - and node.pattern.name.value == "b" - ): - test.assertEqual( - self.get_metadata(PositionProvider, node), - CodeRange((3, 4), (3, 16)), - ) - elif ( - isinstance(node.pattern, cst.MatchAs) - and node.pattern.name - and node.pattern.name.value == "c" - ): - test.assertEqual( - self.get_metadata(PositionProvider, node), - CodeRange((4, 4), (4, 16)), - ) - elif isinstance(node.pattern, cst.MatchAs) and not node.pattern.name: - test.assertEqual( - self.get_metadata(PositionProvider, node), - CodeRange((5, 4), (5, 16)), - 
) - - code = """ -match status: - case b: pass - case c: pass - case _: pass -""" - - wrapper = MetadataWrapper(parse_module(code)) - wrapper.visit(MatchPositionVisitor()) - class PositionProvidingCodegenStateTest(UnitTest): def test_codegen_initial_position(self) -> None: diff --git a/libcst/metadata/tests/test_scope_provider.py b/libcst/metadata/tests/test_scope_provider.py index a367de39..4e65de62 100644 --- a/libcst/metadata/tests/test_scope_provider.py +++ b/libcst/metadata/tests/test_scope_provider.py @@ -14,7 +14,6 @@ from libcst import ensure_type from libcst.metadata import MetadataWrapper from libcst.metadata.scope_provider import ( _gen_dotted_names, - AnnotationScope, Assignment, BuiltinAssignment, BuiltinScope, @@ -252,45 +251,6 @@ class ScopeProviderTest(UnitTest): self.assertEqual(list(scope_of_module["x.y"])[0].references, set()) self.assertEqual(scope_of_module.accesses["x.y"], set()) - def test_dotted_import_access_reference_by_node(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - import a.b.c - a.b.c() - """ - ) - scope_of_module = scopes[m] - first_statement = ensure_type(m.body[1], cst.SimpleStatementLine) - call = ensure_type( - ensure_type(first_statement.body[0], cst.Expr).value, cst.Call - ) - - a_b_c_assignment = cast(ImportAssignment, list(scope_of_module["a.b.c"])[0]) - a_b_c_access = list(a_b_c_assignment.references)[0] - self.assertEqual(scope_of_module.accesses[call], {a_b_c_access}) - self.assertEqual(a_b_c_access.node, call.func) - - def test_decorator_access_reference_by_node(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - import decorator - - @decorator - def f(): - pass - """ - ) - scope_of_module = scopes[m] - function_def = ensure_type(m.body[1], cst.FunctionDef) - decorator = function_def.decorators[0] - self.assertTrue("decorator" in scope_of_module) - - decorator_assignment = cast( - ImportAssignment, list(scope_of_module["decorator"])[0] - ) - decorator_access = list(decorator_assignment.references)[0] - self.assertEqual(scope_of_module.accesses[decorator], {decorator_access}) - def test_dotted_import_with_call_access(self) -> None: m, scopes = get_scope_metadata_provider( """ @@ -652,16 +612,12 @@ class ScopeProviderTest(UnitTest): for assignment in scope_of_outer_f["var"] }, { - ( - outer_f_body_var.targets[0].target - if isinstance(outer_f_body_var, cst.Assign) - else outer_f_body_var - ), - ( - inner_f_body_var.targets[0].target - if isinstance(inner_f_body_var, cst.Assign) - else inner_f_body_var - ), + outer_f_body_var.targets[0].target + if isinstance(outer_f_body_var, cst.Assign) + else outer_f_body_var, + inner_f_body_var.targets[0].target + if isinstance(inner_f_body_var, cst.Assign) + else inner_f_body_var, }, ) @@ -988,25 +944,6 @@ class ScopeProviderTest(UnitTest): {QualifiedName("f4..f5..C", QualifiedNameSource.LOCAL)}, ) - def test_get_qualified_names_for_the_same_prefix(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - from a import b, bc - bc() - """ - ) - call = ensure_type( - ensure_type( - ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.Expr - ).value, - cst.Call, - ) - module_scope = scopes[m] - self.assertEqual( - module_scope.get_qualified_names_for(call.func), - {QualifiedName("a.bc", QualifiedNameSource.IMPORT)}, - ) - def test_get_qualified_names_for_dotted_imports(self) -> None: m, scopes = get_scope_metadata_provider( """ @@ -1489,33 +1426,35 @@ class ScopeProviderTest(UnitTest): def test_global_contains_is_read_only(self) -> None: gscope = GlobalScope() - 
before_assignments = list(gscope.assignments) - before_accesses = list(gscope.accesses) + before_assignments = list(gscope._assignments.items()) + before_accesses = list(gscope._accesses.items()) self.assertFalse("doesnt_exist" in gscope) - self.assertEqual(list(gscope.accesses), before_accesses) - self.assertEqual(list(gscope.assignments), before_assignments) + self.assertEqual(list(gscope._accesses.items()), before_accesses) + self.assertEqual(list(gscope._assignments.items()), before_assignments) def test_contains_is_read_only(self) -> None: for s in [LocalScope, FunctionScope, ClassScope, ComprehensionScope]: with self.subTest(scope=s): gscope = GlobalScope() scope = s(parent=gscope, node=cst.Name("lol")) - before_assignments = list(scope.assignments) - before_accesses = list(scope.accesses) + before_assignments = list(scope._assignments.items()) + before_accesses = list(scope._accesses.items()) before_overwrites = list(scope._scope_overwrites.items()) - before_parent_assignments = list(scope.parent.assignments) - before_parent_accesses = list(scope.parent.accesses) + before_parent_assignments = list(scope.parent._assignments.items()) + before_parent_accesses = list(scope.parent._accesses.items()) self.assertFalse("doesnt_exist" in scope) - self.assertEqual(list(scope.accesses), before_accesses) - self.assertEqual(list(scope.assignments), before_assignments) + self.assertEqual(list(scope._accesses.items()), before_accesses) + self.assertEqual(list(scope._assignments.items()), before_assignments) self.assertEqual( list(scope._scope_overwrites.items()), before_overwrites ) self.assertEqual( - list(scope.parent.assignments), before_parent_assignments + list(scope.parent._assignments.items()), before_parent_assignments + ) + self.assertEqual( + list(scope.parent._accesses.items()), before_parent_accesses ) - self.assertEqual(list(scope.parent.accesses), before_parent_accesses) def test_attribute_of_function_call(self) -> None: get_scope_metadata_provider("foo().bar") @@ -1538,11 +1477,11 @@ class ScopeProviderTest(UnitTest): ) a = m.body[0] scope = scopes[a] - assignments_before = list(scope.assignments) - accesses_before = list(scope.accesses) + assignments_len_before = len(scope._assignments) + accesses_len_before = len(scope._accesses) scope.get_qualified_names_for("doesnt_exist") - self.assertEqual(list(scope.assignments), assignments_before) - self.assertEqual(list(scope.accesses), accesses_before) + self.assertEqual(len(scope._assignments), assignments_len_before) + self.assertEqual(len(scope._accesses), accesses_len_before) def test_gen_dotted_names(self) -> None: names = {name for name, node in _gen_dotted_names(cst.Name(value="a"))} @@ -1592,20 +1531,19 @@ class ScopeProviderTest(UnitTest): first_assignment = list(global_scope.assignments)[0] assert isinstance(first_assignment, cst.metadata.Assignment) self.assertEqual(first_assignment.node, import_stmt) - global_refs = first_assignment.references + global_refs = list(first_assignment.references) self.assertEqual(len(global_refs), 2) - global_refs_nodes = {x.node for x in global_refs} class_def = ensure_type(m.body[1], cst.ClassDef) x = ensure_type( ensure_type(class_def.body.body[0], cst.SimpleStatementLine).body[0], cst.Assign, ) - self.assertIn(x.value, global_refs_nodes) + self.assertEqual(x.value, global_refs[0].node) class_b = ensure_type( ensure_type(class_def.body.body[1], cst.SimpleStatementLine).body[0], cst.Assign, ) - self.assertIn(class_b.value, global_refs_nodes) + self.assertEqual(class_b.value, global_refs[1].node) 
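These scope-provider hunks move the read-only assertions from the public `assignments`/`accesses` views onto the private `_assignments`/`_accesses` dicts. For context, a short sketch of the public `Scope` API the surrounding tests keep exercising, resolved through `ScopeProvider`; the source snippet below is illustrative:

```python
import libcst as cst
from libcst.metadata import MetadataWrapper, ScopeProvider

wrapper = MetadataWrapper(cst.parse_module("import a\na()\n"))
scopes = wrapper.resolve(ScopeProvider)           # node -> Scope mapping
module_scope = scopes[wrapper.module]             # GlobalScope of the (copied) module
assert module_scope is not None

print("a" in module_scope)                        # membership checks should not mutate the scope
print(list(module_scope["a"]))                    # Assignment objects recorded for "a"
print(list(module_scope.accesses["a"]))           # Access objects that read "a"
print(module_scope.get_qualified_names_for("a"))  # e.g. {QualifiedName("a", QualifiedNameSource.IMPORT)}
```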
class_accesses = list(scopes[x].accesses) self.assertEqual(len(class_accesses), 3) @@ -2008,232 +1946,3 @@ class ScopeProviderTest(UnitTest): self.assertEqual(len(assignment.references), 1) references = list(assignment.references) self.assertTrue(references[0].is_annotation) - - def test_prefix_match(self) -> None: - """Verify that a name doesn't overmatch on prefix""" - m, scopes = get_scope_metadata_provider( - """ - def something(): - ... - """ - ) - scope = scopes[m] - self.assertEqual( - scope.get_qualified_names_for(cst.Name("something")), - {QualifiedName(name="something", source=QualifiedNameSource.LOCAL)}, - ) - self.assertEqual( - scope.get_qualified_names_for(cst.Name("something_else")), - set(), - ) - - def test_type_alias_scope(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - type A = C - lol: A - """ - ) - alias = ensure_type( - ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.TypeAlias - ) - self.assertIsInstance(scopes[alias], GlobalScope) - a_assignments = list(scopes[alias]["A"]) - self.assertEqual(len(a_assignments), 1) - lol = ensure_type( - ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.AnnAssign - ) - self.assertEqual(len(a_references := list(a_assignments[0].references)), 1) - self.assertEqual(a_references[0].node, lol.annotation.annotation) - - self.assertIsInstance(scopes[alias.value], AnnotationScope) - - def test_type_alias_param(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - B = int - type A[T: B] = T - lol: T - """ - ) - alias = ensure_type( - ensure_type(m.body[1], cst.SimpleStatementLine).body[0], cst.TypeAlias - ) - assert alias.type_parameters - param_scope = scopes[alias.type_parameters] - self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) - self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) - self.assertEqual(t_refs[0].node, alias.value) - - b = ( - ensure_type( - ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.Assign - ) - .targets[0] - .target - ) - b_assignment = list(scopes[b]["B"])[0] - self.assertEqual( - {ref.node for ref in b_assignment.references}, - {ensure_type(alias.type_parameters.params[0].param, cst.TypeVar).bound}, - ) - - def test_type_alias_tuple_and_paramspec(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - type A[*T] = T - lol: T - type A[**T] = T - lol: T - """ - ) - alias_tuple = ensure_type( - ensure_type(m.body[0], cst.SimpleStatementLine).body[0], cst.TypeAlias - ) - assert alias_tuple.type_parameters - param_scope = scopes[alias_tuple.type_parameters] - self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) - self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) - self.assertEqual(t_refs[0].node, alias_tuple.value) - - alias_paramspec = ensure_type( - ensure_type(m.body[2], cst.SimpleStatementLine).body[0], cst.TypeAlias - ) - assert alias_paramspec.type_parameters - param_scope = scopes[alias_paramspec.type_parameters] - self.assertEqual(len(t_assignments := list(param_scope["T"])), 1) - self.assertEqual(len(t_refs := list(t_assignments[0].references)), 1) - self.assertEqual(t_refs[0].node, alias_paramspec.value) - - def test_class_type_params(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - class W[T]: - def f() -> T: pass - def g[T]() -> T: pass - """ - ) - cls = ensure_type(m.body[0], cst.ClassDef) - cls_scope = scopes[cls.body.body[0]] - self.assertEqual(len(t_assignments_in_cls := list(cls_scope["T"])), 1) - assert cls.type_parameters - self.assertEqual( - 
ensure_type(t_assignments_in_cls[0], Assignment).node, - cls.type_parameters.params[0].param, - ) - self.assertEqual( - len(t_refs_in_cls := list(t_assignments_in_cls[0].references)), 1 - ) - f = ensure_type(cls.body.body[0], cst.FunctionDef) - assert f.returns - self.assertEqual(t_refs_in_cls[0].node, f.returns.annotation) - - g = ensure_type(cls.body.body[1], cst.FunctionDef) - assert g.type_parameters - assert g.returns - self.assertEqual(len(t_assignments_in_g := list(scopes[g.body]["T"])), 1) - self.assertEqual( - ensure_type(t_assignments_in_g[0], Assignment).node, - g.type_parameters.params[0].param, - ) - self.assertEqual(len(t_refs_in_g := list(t_assignments_in_g[0].references)), 1) - self.assertEqual(t_refs_in_g[0].node, g.returns.annotation) - - def test_nested_class_type_params(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - class Outer: - class Nested[T: Outer]: pass - """ - ) - outer = ensure_type(m.body[0], cst.ClassDef) - outer_refs = list(list(scopes[outer]["Outer"])[0].references) - self.assertEqual(len(outer_refs), 1) - inner = ensure_type(outer.body.body[0], cst.ClassDef) - assert inner.type_parameters - self.assertEqual( - outer_refs[0].node, - ensure_type(inner.type_parameters.params[0].param, cst.TypeVar).bound, - ) - - def test_annotation_refers_to_nested_class(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - class Outer: - class Nested: - pass - - type Alias = Nested - - def meth1[T: Nested](self): pass - def meth2[T](self, arg: Nested): pass - """ - ) - outer = ensure_type(m.body[0], cst.ClassDef) - nested = ensure_type(outer.body.body[0], cst.ClassDef) - alias = ensure_type( - ensure_type(outer.body.body[1], cst.SimpleStatementLine).body[0], - cst.TypeAlias, - ) - self.assertIsInstance(scopes[alias.value], AnnotationScope) - nested_refs_within_alias = list(scopes[alias.value].accesses["Nested"]) - self.assertEqual(len(nested_refs_within_alias), 1) - self.assertEqual( - { - ensure_type(ref, Assignment).node - for ref in nested_refs_within_alias[0].referents - }, - {nested}, - ) - - meth1 = ensure_type(outer.body.body[2], cst.FunctionDef) - self.assertIsInstance(scopes[meth1], ClassScope) - assert meth1.type_parameters - meth1_typevar = ensure_type(meth1.type_parameters.params[0].param, cst.TypeVar) - meth1_typevar_scope = scopes[meth1_typevar] - self.assertIsInstance(meth1_typevar_scope, AnnotationScope) - nested_refs_within_meth1 = list(meth1_typevar_scope.accesses["Nested"]) - self.assertEqual(len(nested_refs_within_meth1), 1) - self.assertEqual( - { - ensure_type(ref, Assignment).node - for ref in nested_refs_within_meth1[0].referents - }, - {nested}, - ) - - meth2 = ensure_type(outer.body.body[3], cst.FunctionDef) - meth2_annotation = meth2.params.params[1].annotation - assert meth2_annotation - nested_refs_within_meth2 = list(scopes[meth2_annotation].accesses["Nested"]) - self.assertEqual(len(nested_refs_within_meth2), 1) - self.assertEqual( - { - ensure_type(ref, Assignment).node - for ref in nested_refs_within_meth2[0].referents - }, - {nested}, - ) - - def test_body_isnt_subject_to_special_annotation_rule(self) -> None: - m, scopes = get_scope_metadata_provider( - """ - class Outer: - class Inner: pass - def f[T: Inner](self): Inner - """ - ) - outer = ensure_type(m.body[0], cst.ClassDef) - # note: this is different from global scope - outer_scope = scopes[outer.body.body[0]] - inner_assignment = list(outer_scope["Inner"])[0] - self.assertEqual(len(inner_assignment.references), 1) - f = ensure_type(outer.body.body[1], 
cst.FunctionDef) - assert f.type_parameters - T = ensure_type(f.type_parameters.params[0].param, cst.TypeVar) - self.assertIs(list(inner_assignment.references)[0].node, T.bound) - - inner_in_func_body = ensure_type(f.body.body[0], cst.Expr) - f_scope = scopes[inner_in_func_body] - self.assertIn(inner_in_func_body.value, f_scope.accesses) - self.assertEqual(list(f_scope.accesses)[0].referents, set()) diff --git a/libcst/metadata/tests/test_type_inference_provider.py b/libcst/metadata/tests/test_type_inference_provider.py index a0a70a8c..c52a7c8e 100644 --- a/libcst/metadata/tests/test_type_inference_provider.py +++ b/libcst/metadata/tests/test_type_inference_provider.py @@ -9,7 +9,6 @@ import os import subprocess import sys from pathlib import Path -from typing import cast, Mapping, Optional from unittest import skipIf import libcst as cst @@ -58,16 +57,20 @@ def _test_simple_class_helper(test: UnitTest, wrapper: MetadataWrapper) -> None: ) @skipIf(sys.platform == "win32", "TypeInferenceProvider doesn't support windows") class TypeInferenceProviderTest(UnitTest): - maxDiff: Optional[int] = None - @classmethod def setUpClass(cls) -> None: os.chdir(TEST_SUITE_PATH) - subprocess.run(["pyre", "-n", "start", "--no-watchman"]) + try: + subprocess.run(["pyre", "-n", "start", "--no-watchman"]) + except subprocess.TimeoutExpired as exc: + raise exc @classmethod def tearDownClass(cls) -> None: - subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) + try: + subprocess.run(["pyre", "-n", "stop"], cwd=TEST_SUITE_PATH) + except subprocess.TimeoutExpired as exc: + raise exc @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) @@ -76,13 +79,8 @@ class TypeInferenceProviderTest(UnitTest): cache = TypeInferenceProvider.gen_cache( root_path=source_path.parent, paths=[source_path.name], timeout=None ) - result = cast(Mapping[str, object], cache[source_path.name]) data: PyreData = json.loads(data_path.read_text()) - self.assertDictEqual( - data, - result, - "Pyre query result mismatch, try running `scripts/regenerate-fixtures.py`?", - ) + self.assertEqual(data, cache[source_path.name]) @data_provider( ((TEST_SUITE_PATH / "simple_class.py", TEST_SUITE_PATH / "simple_class.json"),) diff --git a/libcst/metadata/type_inference_provider.py b/libcst/metadata/type_inference_provider.py index 8a90c26b..340d1c51 100644 --- a/libcst/metadata/type_inference_provider.py +++ b/libcst/metadata/type_inference_provider.py @@ -6,7 +6,9 @@ import json import subprocess from pathlib import Path -from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, TypedDict +from typing import Dict, List, Mapping, Optional, Sequence, Tuple + +from mypy_extensions import TypedDict import libcst as cst from libcst._position import CodePosition, CodeRange @@ -14,11 +16,6 @@ from libcst.metadata.base_provider import BatchableMetadataProvider from libcst.metadata.position_provider import PositionProvider -class TypeInferenceError(Exception): - """An attempt to access inferred type annotation - (through Pyre Query API) failed.""" - - class Position(TypedDict): line: int column: int @@ -42,7 +39,7 @@ class PyreData(TypedDict, total=False): class TypeInferenceProvider(BatchableMetadataProvider[str]): """ Access inferred type annotation through `Pyre Query API `_. - It requires `setup watchman `_ + It requires `setup watchman `_ and start pyre server by running ``pyre`` command. The inferred type is a string of `type annotation `_. E.g. 
``typing.List[libcst._nodes.expression.Name]`` @@ -55,29 +52,26 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): METADATA_DEPENDENCIES = (PositionProvider,) - @classmethod + @staticmethod + # pyre-fixme[40]: Static method `gen_cache` cannot override a non-static method + # defined in `cst.metadata.base_provider.BaseMetadataProvider`. def gen_cache( - cls, - root_path: Path, - paths: List[str], - timeout: Optional[int] = None, - **kwargs: Any, + root_path: Path, paths: List[str], timeout: Optional[int] ) -> Mapping[str, object]: params = ",".join(f"path='{root_path / path}'" for path in paths) cmd_args = ["pyre", "--noninteractive", "query", f"types({params})"] - - result = subprocess.run( - cmd_args, capture_output=True, timeout=timeout, text=True - ) - try: - result.check_returncode() - resp = json.loads(result.stdout)["response"] - except Exception as e: - raise TypeInferenceError( - f"{e}\n\nstderr:\n {result.stderr}\nstdout:\n {result.stdout}" - ) from e + stdout, stderr, return_code = run_command(cmd_args, timeout=timeout) + except subprocess.TimeoutExpired as exc: + raise exc + + if return_code != 0: + raise Exception(f"stderr:\n {stderr}\nstdout:\n {stdout}") + try: + resp = json.loads(stdout)["response"] + except Exception as e: + raise Exception(f"{e}\n\nstderr:\n {stderr}\nstdout:\n {stdout}") return {path: _process_pyre_data(data) for path, data in zip(paths, resp)} def __init__(self, cache: PyreData) -> None: @@ -111,6 +105,13 @@ class TypeInferenceProvider(BatchableMetadataProvider[str]): self._parse_metadata(node) +def run_command( + cmd_args: List[str], timeout: Optional[int] = None +) -> Tuple[str, str, int]: + process = subprocess.run(cmd_args, capture_output=True, timeout=timeout) + return process.stdout.decode(), process.stderr.decode(), process.returncode + + class RawPyreData(TypedDict): path: str types: Sequence[InferredType] diff --git a/libcst/testing/utils.py b/libcst/testing/utils.py index 8a320571..f2557b66 100644 --- a/libcst/testing/utils.py +++ b/libcst/testing/utils.py @@ -64,6 +64,7 @@ def populate_data_provider_tests(dct: Dict[str, Any]) -> None: member_name, member, DATA_PROVIDER_DATA_ATTR_NAME ) if provider_data is not None: + for description, data in ( provider_data.items() if isinstance(provider_data, dict) diff --git a/libcst/tests/__main__.py b/libcst/tests/__main__.py deleted file mode 100644 index df28d1a6..00000000 --- a/libcst/tests/__main__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
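In the `type_inference_provider.py` hunks, `gen_cache` becomes a plain static method with a positional timeout, and the subprocess handling is factored into a small `run_command` helper. A sketch of feeding the resulting cache into a `MetadataWrapper`, assuming a running `pyre` server and the test-suite fixture path used above:

```python
from pathlib import Path

import libcst as cst
from libcst.metadata import MetadataWrapper, TypeInferenceProvider

# Assumed project root; `pyre start --no-watchman` must already be running here.
root = Path("libcst/tests/pyre")
paths = ["simple_class.py"]

# gen_cache shells out to `pyre query "types(...)"` and returns {path: PyreData}.
cache = TypeInferenceProvider.gen_cache(root, paths, None)

wrapper = MetadataWrapper(
    cst.parse_module((root / paths[0]).read_text()),
    cache={TypeInferenceProvider: cache[paths[0]]},
)
inferred = wrapper.resolve(TypeInferenceProvider)  # node -> inferred type string
```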
- -from unittest import main - - -if __name__ == "__main__": - main(module=None, verbosity=2) diff --git a/libcst/tests/pyre/simple_class.json b/libcst/tests/pyre/simple_class.json index 85192559..288bb567 100644 --- a/libcst/tests/pyre/simple_class.json +++ b/libcst/tests/pyre/simple_class.json @@ -1,485 +1,511 @@ { "types": [ { - "annotation": "typing.Type[typing.Sequence]", "location": { "start": { - "column": 19, - "line": 7 + "line": 7, + "column": 19 }, "stop": { - "column": 27, - "line": 7 + "line": 7, + "column": 27 } - } + }, + "annotation": "typing.Type[typing.Sequence]" }, { - "annotation": "typing.Type[simple_class.Item]", "location": { "start": { - "column": 6, - "line": 10 + "line": 10, + "column": 6 }, "stop": { - "column": 10, - "line": 10 + "line": 10, + "column": 10 } - } + }, + "annotation": "typing.Type[simple_class.Item]" }, { - "annotation": "typing.Callable(simple_class.Item.__init__)[[Named(self, simple_class.Item), Named(n, int)], None]", "location": { "start": { - "column": 8, - "line": 11 + "line": 11, + "column": 8 }, "stop": { - "column": 16, - "line": 11 + "line": 11, + "column": 16 } - } + }, + "annotation": "typing.Callable(simple_class.Item.__init__)[[Named(self, simple_class.Item), Named(n, int)], None]" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 17, - "line": 11 + "line": 11, + "column": 17 }, "stop": { - "column": 21, - "line": 11 + "line": 11, + "column": 21 } - } + }, + "annotation": "simple_class.Item" }, { - "annotation": "int", "location": { "start": { - "column": 23, - "line": 11 + "line": 11, + "column": 23 }, "stop": { - "column": 24, - "line": 11 + "line": 11, + "column": 29 } - } + }, + "annotation": "int" }, { - "annotation": "typing.Type[int]", "location": { "start": { - "column": 26, - "line": 11 + "line": 11, + "column": 26 }, "stop": { - "column": 29, - "line": 11 + "line": 11, + "column": 29 } - } + }, + "annotation": "typing.Type[int]" }, { - "annotation": "typing.Type[None]", "location": { "start": { - "column": 34, - "line": 11 + "line": 11, + "column": 34 }, "stop": { - "column": 38, - "line": 11 + "line": 11, + "column": 38 } - } + }, + "annotation": "None" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 8, - "line": 12 + "line": 12, + "column": 8 }, "stop": { - "column": 12, - "line": 12 + "line": 12, + "column": 12 } - } + }, + "annotation": "simple_class.Item" }, { - "annotation": "int", "location": { "start": { - "column": 8, - "line": 12 + "line": 12, + "column": 8 }, "stop": { - "column": 19, - "line": 12 + "line": 12, + "column": 19 } - } + }, + "annotation": "int" }, { - "annotation": "typing.Type[int]", "location": { "start": { - "column": 21, - "line": 12 + "line": 12, + "column": 21 }, "stop": { - "column": 24, - "line": 12 + "line": 12, + "column": 24 } - } + }, + "annotation": "typing.Type[int]" }, { - "annotation": "int", "location": { "start": { - "column": 27, - "line": 12 + "line": 12, + "column": 27 }, "stop": { - "column": 28, - "line": 12 + "line": 12, + "column": 28 } - } + }, + "annotation": "int" }, { - "annotation": "typing.Type[simple_class.ItemCollector]", "location": { "start": { - "column": 6, - "line": 15 + "line": 15, + "column": 6 }, "stop": { - "column": 19, - "line": 15 + "line": 15, + "column": 19 } - } + }, + "annotation": "typing.Type[simple_class.ItemCollector]" }, { - "annotation": "typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]]", 
"location": { "start": { - "column": 8, - "line": 16 + "line": 16, + "column": 8 }, "stop": { - "column": 17, - "line": 16 + "line": 16, + "column": 17 } - } + }, + "annotation": "typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]]" }, { - "annotation": "simple_class.ItemCollector", "location": { "start": { - "column": 18, - "line": 16 + "line": 16, + "column": 18 }, "stop": { - "column": 22, - "line": 16 + "line": 16, + "column": 22 } - } + }, + "annotation": "simple_class.ItemCollector" }, { - "annotation": "int", "location": { "start": { - "column": 24, - "line": 16 + "line": 16, + "column": 24 }, "stop": { - "column": 25, - "line": 16 + "line": 16, + "column": 30 } - } + }, + "annotation": "int" }, { - "annotation": "typing.Type[int]", "location": { "start": { - "column": 27, - "line": 16 + "line": 16, + "column": 27 }, "stop": { - "column": 30, - "line": 16 + "line": 16, + "column": 30 } - } + }, + "annotation": "typing.Type[int]" }, { - "annotation": "typing.Type[typing.Sequence[simple_class.Item]]", "location": { "start": { - "column": 35, - "line": 16 + "line": 16, + "column": 35 }, "stop": { - "column": 49, - "line": 16 + "line": 16, + "column": 43 } - } + }, + "annotation": "BoundMethod[typing.Callable(typing.GenericMeta.__getitem__)[[Named(self, unknown), typing.Type[Variable[typing._T_co](covariant)]], typing.Type[typing.Sequence[Variable[typing._T_co](covariant)]]], typing.Type[typing.Sequence]]" }, { - "annotation": "typing.List[simple_class.Item]", "location": { "start": { - "column": 15, - "line": 17 + "line": 16, + "column": 35 }, "stop": { - "column": 42, - "line": 17 + "line": 16, + "column": 49 } - } + }, + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" }, { - "annotation": "typing.Type[simple_class.Item]", "location": { "start": { - "column": 16, - "line": 17 + "line": 16, + "column": 44 }, "stop": { - "column": 20, - "line": 17 + "line": 16, + "column": 48 } - } + }, + "annotation": "typing.Type[simple_class.Item]" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 16, - "line": 17 + "line": 17, + "column": 15 }, "stop": { - "column": 23, - "line": 17 + "line": 17, + "column": 42 } - } + }, + "annotation": "typing.List[simple_class.Item]" }, { - "annotation": "int", "location": { "start": { - "column": 28, - "line": 17 + "line": 17, + "column": 16 }, "stop": { - "column": 29, - "line": 17 + "line": 17, + "column": 20 } - } + }, + "annotation": "typing.Type[simple_class.Item]" }, { - "annotation": "typing.Type[range]", "location": { "start": { - "column": 33, - "line": 17 + "line": 17, + "column": 16 }, "stop": { - "column": 38, - "line": 17 + "line": 17, + "column": 23 } - } + }, + "annotation": "simple_class.Item" }, { - "annotation": "range", "location": { "start": { - "column": 33, - "line": 17 + "line": 17, + "column": 28 }, "stop": { - "column": 41, - "line": 17 + "line": 17, + "column": 29 } - } + }, + "annotation": "int" }, { - "annotation": "int", "location": { "start": { - "column": 39, - "line": 17 + "line": 17, + "column": 33 }, "stop": { - "column": 40, - "line": 17 + "line": 17, + "column": 38 } - } + }, + "annotation": "typing.Type[range]" }, { - "annotation": "simple_class.ItemCollector", "location": { "start": { - "column": 0, - "line": 20 + "line": 17, + "column": 33 }, "stop": { - "column": 9, - "line": 20 + "line": 17, + "column": 41 } - } + }, + "annotation": "range" }, { - "annotation": 
"typing.Type[simple_class.ItemCollector]", "location": { "start": { - "column": 12, - "line": 20 + "line": 17, + "column": 39 }, "stop": { - "column": 25, - "line": 20 + "line": 17, + "column": 40 } - } + }, + "annotation": "int" }, { - "annotation": "simple_class.ItemCollector", "location": { "start": { - "column": 12, - "line": 20 + "line": 20, + "column": 0 }, "stop": { - "column": 27, - "line": 20 + "line": 20, + "column": 9 } - } + }, + "annotation": "simple_class.ItemCollector" }, { - "annotation": "typing.Sequence[simple_class.Item]", "location": { "start": { - "column": 0, - "line": 21 + "line": 20, + "column": 12 }, "stop": { - "column": 5, - "line": 21 + "line": 20, + "column": 25 } - } + }, + "annotation": "typing.Type[simple_class.ItemCollector]" }, { - "annotation": "typing.Type[typing.Sequence[simple_class.Item]]", "location": { "start": { - "column": 7, - "line": 21 + "line": 20, + "column": 12 }, "stop": { - "column": 21, - "line": 21 + "line": 20, + "column": 27 } - } + }, + "annotation": "simple_class.ItemCollector" }, { - "annotation": "simple_class.ItemCollector", "location": { "start": { - "column": 24, - "line": 21 + "line": 21, + "column": 0 }, "stop": { - "column": 33, - "line": 21 + "line": 21, + "column": 5 } - } + }, + "annotation": "typing.Sequence[simple_class.Item]" }, { - "annotation": "BoundMethod[typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]], simple_class.ItemCollector]", "location": { "start": { - "column": 24, - "line": 21 + "line": 21, + "column": 7 }, "stop": { - "column": 43, - "line": 21 + "line": 21, + "column": 21 } - } + }, + "annotation": "typing.Type[typing.Sequence[simple_class.Item]]" }, { - "annotation": "typing.Sequence[simple_class.Item]", "location": { "start": { - "column": 24, - "line": 21 + "line": 21, + "column": 24 }, "stop": { - "column": 46, - "line": 21 + "line": 21, + "column": 33 } - } + }, + "annotation": "simple_class.ItemCollector" }, { - "annotation": "typing_extensions.Literal[3]", "location": { "start": { - "column": 44, - "line": 21 + "line": 21, + "column": 24 }, "stop": { - "column": 45, - "line": 21 + "line": 21, + "column": 43 } - } + }, + "annotation": "BoundMethod[typing.Callable(simple_class.ItemCollector.get_items)[[Named(self, simple_class.ItemCollector), Named(n, int)], typing.Sequence[simple_class.Item]], simple_class.ItemCollector]" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 4, - "line": 22 + "line": 21, + "column": 24 }, "stop": { - "column": 8, - "line": 22 + "line": 21, + "column": 46 } - } + }, + "annotation": "typing.Sequence[simple_class.Item]" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 12, - "line": 22 + "line": 21, + "column": 44 }, "stop": { - "column": 17, - "line": 22 + "line": 21, + "column": 45 } - } + }, + "annotation": "typing_extensions.Literal[3]" }, { - "annotation": "simple_class.Item", "location": { "start": { - "column": 4, - "line": 23 + "line": 22, + "column": 4 }, "stop": { - "column": 8, - "line": 23 + "line": 22, + "column": 8 } - } + }, + "annotation": "simple_class.Item" }, { - "annotation": "int", "location": { "start": { - "column": 4, - "line": 23 + "line": 22, + "column": 12 }, "stop": { - "column": 15, - "line": 23 + "line": 22, + "column": 17 } - } + }, + "annotation": "typing.Sequence[simple_class.Item]" + }, + { + "location": { + "start": { + "line": 23, + "column": 4 + }, + "stop": { + "line": 23, + 
"column": 8 + } + }, + "annotation": "simple_class.Item" + }, + { + "location": { + "start": { + "line": 23, + "column": 4 + }, + "stop": { + "line": 23, + "column": 15 + } + }, + "annotation": "int" } ] } \ No newline at end of file diff --git a/libcst/tests/test_add_slots.py b/libcst/tests/test_add_slots.py deleted file mode 100644 index e354f60b..00000000 --- a/libcst/tests/test_add_slots.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import pickle -from dataclasses import dataclass -from typing import ClassVar - -from libcst._add_slots import add_slots - -from libcst.testing.utils import UnitTest - - -# this test class needs to be defined at module level to test pickling. -@add_slots -@dataclass(frozen=True) -class A: - x: int - y: str - - Z: ClassVar[int] = 5 - - -class AddSlotsTest(UnitTest): - def test_pickle(self) -> None: - a = A(1, "foo") - self.assertEqual(a, pickle.loads(pickle.dumps(a))) - object.__delattr__(a, "y") - self.assertEqual(a.x, pickle.loads(pickle.dumps(a)).x) - - def test_prevents_slots_overlap(self) -> None: - class A: - __slots__ = ("x",) - - class B(A): - __slots__ = ("z",) - - @add_slots - @dataclass - class C(B): - x: int - y: str - z: bool - - self.assertSequenceEqual(C.__slots__, ("y",)) diff --git a/libcst/tests/test_batched_visitor.py b/libcst/tests/test_batched_visitor.py index 9009847c..9bcc562f 100644 --- a/libcst/tests/test_batched_visitor.py +++ b/libcst/tests/test_batched_visitor.py @@ -16,57 +16,57 @@ class BatchedVisitorTest(UnitTest): mock = Mock() class ABatchable(BatchableCSTVisitor): - def visit_Del(self, node: cst.Del) -> None: - object.__setattr__(node, "target", mock.visited_a()) + def visit_Pass(self, node: cst.Pass) -> None: + mock.visited_a() + object.__setattr__(node, "a_attr", True) class BBatchable(BatchableCSTVisitor): - def visit_Del(self, node: cst.Del) -> None: - object.__setattr__(node, "semicolon", mock.visited_b()) + def visit_Pass(self, node: cst.Pass) -> None: + mock.visited_b() + object.__setattr__(node, "b_attr", 1) - module = visit_batched(parse_module("del a"), [ABatchable(), BBatchable()]) - del_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] + module = visit_batched(parse_module("pass"), [ABatchable(), BBatchable()]) + pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] + + # Check properties were set + self.assertEqual(object.__getattribute__(pass_, "a_attr"), True) + self.assertEqual(object.__getattribute__(pass_, "b_attr"), 1) # Check that each visitor was only called once mock.visited_a.assert_called_once() mock.visited_b.assert_called_once() - # Check properties were set - self.assertEqual(object.__getattribute__(del_, "target"), mock.visited_a()) - self.assertEqual(object.__getattribute__(del_, "semicolon"), mock.visited_b()) - def test_all_visits(self) -> None: mock = Mock() class Batchable(BatchableCSTVisitor): - def visit_If(self, node: cst.If) -> None: - object.__setattr__(node, "test", mock.visit_If()) + def visit_Pass(self, node: cst.Pass) -> None: + mock.visit_Pass() + object.__setattr__(node, "visit_Pass", True) - def visit_If_body(self, node: cst.If) -> None: - object.__setattr__(node, "leading_lines", mock.visit_If_body()) + def visit_Pass_semicolon(self, node: cst.Pass) -> None: + mock.visit_Pass_semicolon() + object.__setattr__(node, "visit_Pass_semicolon", True) - def leave_If_body(self, node: cst.If) -> None: - 
object.__setattr__(node, "orelse", mock.leave_If_body()) + def leave_Pass_semicolon(self, node: cst.Pass) -> None: + mock.leave_Pass_semicolon() + object.__setattr__(node, "leave_Pass_semicolon", True) - def leave_If(self, original_node: cst.If) -> None: - object.__setattr__( - original_node, "whitespace_before_test", mock.leave_If() - ) + def leave_Pass(self, original_node: cst.Pass) -> None: + mock.leave_Pass() + object.__setattr__(original_node, "leave_Pass", True) - module = visit_batched(parse_module("if True: pass"), [Batchable()]) - if_ = cast(cst.SimpleStatementLine, module.body[0]) - - # Check that each visitor was only called once - mock.visit_If.assert_called_once() - mock.leave_If.assert_called_once() - mock.visit_If_body.assert_called_once() - mock.leave_If_body.assert_called_once() + module = visit_batched(parse_module("pass"), [Batchable()]) + pass_ = cast(cst.SimpleStatementLine, module.body[0]).body[0] # Check properties were set - self.assertEqual(object.__getattribute__(if_, "test"), mock.visit_If()) - self.assertEqual( - object.__getattribute__(if_, "leading_lines"), mock.visit_If_body() - ) - self.assertEqual(object.__getattribute__(if_, "orelse"), mock.leave_If_body()) - self.assertEqual( - object.__getattribute__(if_, "whitespace_before_test"), mock.leave_If() - ) + self.assertEqual(object.__getattribute__(pass_, "visit_Pass"), True) + self.assertEqual(object.__getattribute__(pass_, "leave_Pass"), True) + self.assertEqual(object.__getattribute__(pass_, "visit_Pass_semicolon"), True) + self.assertEqual(object.__getattribute__(pass_, "leave_Pass_semicolon"), True) + + # Check that each visitor was only called once + mock.visit_Pass.assert_called_once() + mock.leave_Pass.assert_called_once() + mock.visit_Pass_semicolon.assert_called_once() + mock.leave_Pass_semicolon.assert_called_once() diff --git a/libcst/tests/test_e2e.py b/libcst/tests/test_e2e.py index e6dfdb5c..ecdc23ba 100644 --- a/libcst/tests/test_e2e.py +++ b/libcst/tests/test_e2e.py @@ -2,7 +2,7 @@ import contextlib import os from pathlib import Path from tempfile import TemporaryDirectory -from typing import Dict, Generator +from typing import Generator from unittest import TestCase from libcst import BaseExpression, Call, matchers as m, Name @@ -16,14 +16,7 @@ from libcst.codemod.visitors import AddImportsVisitor class PrintToPPrintCommand(VisitorBasedCodemodCommand): - def __init__(self, context: CodemodContext, **kwargs: Dict[str, object]) -> None: - super().__init__(context, **kwargs) - self.context.scratch["PPRINT_WAS_HERE"] = True - def leave_Call(self, original_node: Call, updated_node: Call) -> BaseExpression: - if not self.context.scratch["PPRINT_WAS_HERE"]: - raise AssertionError("Scratch space lost") - if m.matches(updated_node, m.Call(func=m.Name("print"))): AddImportsVisitor.add_needed_import( self.context, @@ -48,45 +41,39 @@ def temp_workspace() -> Generator[Path, None, None]: class ToolE2ETest(TestCase): def test_leaky_codemod(self) -> None: - for msg, command in [ - ("instantiated", PrintToPPrintCommand(CodemodContext())), - ("class", PrintToPPrintCommand), - ]: - with self.subTest(msg), temp_workspace() as tmp: - # File to trigger codemod - example: Path = tmp / "example.py" - example.write_text("""print("Hello")""") - # File that should not be modified - other = tmp / "other.py" - other.touch() - # Just a dir named "dir.py", should be ignored - adir = tmp / "dir.py" - adir.mkdir() + with temp_workspace() as tmp: + # File to trigger codemod + example: Path = tmp / "example.py" + 
example.write_text("""print("Hello")""") + # File that should not be modified + other = tmp / "other.py" + other.touch() - # Run command - files = gather_files(".") - result = parallel_exec_transform_with_prettyprint( - command, - files, - format_code=False, - hide_progress=True, - ) + # Run command + command_instance = PrintToPPrintCommand(CodemodContext()) + files = gather_files(".") + result = parallel_exec_transform_with_prettyprint( + command_instance, + files, + format_code=False, + hide_progress=True, + ) - print(result) + print(result) - # Check results - self.assertEqual(2, result.successes) - self.assertEqual(0, result.skips) - self.assertEqual(0, result.failures) - # Expect example.py to be modified - self.assertIn( - "from pprint import pprint", - example.read_text(), - "import missing in example.py", - ) - # Expect other.py to NOT be modified - self.assertNotIn( - "from pprint import pprint", - other.read_text(), - "import found in other.py", - ) + # Check results + self.assertEqual(2, result.successes) + self.assertEqual(0, result.skips) + self.assertEqual(0, result.failures) + # Expect example.py to be modified + self.assertIn( + "from pprint import pprint", + example.read_text(), + "import missing in example.py", + ) + # Expect other.py to NOT be modified + self.assertNotIn( + "from pprint import pprint", + other.read_text(), + "import found in other.py", + ) diff --git a/libcst/tests/test_fuzz.py b/libcst/tests/test_fuzz.py index 6ec95136..66b32276 100644 --- a/libcst/tests/test_fuzz.py +++ b/libcst/tests/test_fuzz.py @@ -50,9 +50,6 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) - # pyre-fixme[56]: Pyre was not able to infer the type of the decorator - # `hypothesis.given($parameter$source_code = - # hypothesmith.from_grammar($parameter$start = "file_input"))`. @hypothesis.given(source_code=from_grammar(start="file_input")) def test_parsing_compilable_module_strings(self, source_code: str) -> None: """The `from_grammar()` strategy generates strings from Python's grammar. @@ -80,9 +77,6 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) - # pyre-fixme[56]: Pyre was not able to infer the type of the decorator - # `hypothesis.given($parameter$source_code = - # hypothesmith.from_grammar($parameter$start = "eval_input").map(str.strip))`. @hypothesis.given(source_code=from_grammar(start="eval_input").map(str.strip)) def test_parsing_compilable_expression_strings(self, source_code: str) -> None: """Much like statements, but for expressions this time. @@ -111,10 +105,6 @@ class FuzzTest(unittest.TestCase): @unittest.skipUnless( bool(os.environ.get("HYPOTHESIS", False)), "Hypothesis not requested" ) - # pyre-fixme[56]: Pyre was not able to infer the type of the decorator - # `hypothesis.given($parameter$source_code = - # hypothesmith.from_grammar($parameter$start = "single_input").map(lambda - # ($parameter$s) (s.replace(" @hypothesis.given( source_code=from_grammar(start="single_input").map( lambda s: s.replace("\n", "") + "\n" diff --git a/libcst/tests/test_import.py b/libcst/tests/test_import.py deleted file mode 100644 index cad8883d..00000000 --- a/libcst/tests/test_import.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
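The `test_e2e.py` hunk collapses the instantiated-vs-class subtests into a single run that passes an instantiated command to the parallel transform. Roughly, the flow it exercises looks like the sketch below; `NoopCommand` is a hypothetical stand-in so the snippet is self-contained, and the temp workspace mirrors the test's `temp_workspace()` helper:

```python
import os
from pathlib import Path
from tempfile import TemporaryDirectory

from libcst.codemod import (
    CodemodContext,
    gather_files,
    parallel_exec_transform_with_prettyprint,
    VisitorBasedCodemodCommand,
)


class NoopCommand(VisitorBasedCodemodCommand):
    """Stand-in command; PrintToPPrintCommand from the hunk above is driven the same way."""


with TemporaryDirectory() as tmp:
    (Path(tmp) / "example.py").write_text('print("Hello")\n')
    os.chdir(tmp)  # test_e2e.py does this via its temp_workspace() context manager

    result = parallel_exec_transform_with_prettyprint(
        NoopCommand(CodemodContext()),
        gather_files("."),
        format_code=False,
        hide_progress=True,
    )
    print(result.successes, result.skips, result.failures)
```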
- - -from unittest import TestCase - - -class TestImport(TestCase): - def test_import_libcst(self) -> None: - import libcst # noqa: F401 diff --git a/libcst/tests/test_pyre_integration.py b/libcst/tests/test_pyre_integration.py index 679b2d5e..021385af 100644 --- a/libcst/tests/test_pyre_integration.py +++ b/libcst/tests/test_pyre_integration.py @@ -5,12 +5,17 @@ import json +import os from pathlib import Path from typing import Dict, List, Mapping, Optional, Tuple, Union import libcst as cst from libcst.metadata import MetadataWrapper, PositionProvider -from libcst.metadata.type_inference_provider import PyreData +from libcst.metadata.type_inference_provider import ( + _process_pyre_data, + PyreData, + run_command, +) from libcst.testing.utils import data_provider, UnitTest TEST_SUITE_PATH: Path = Path(__file__).parent / "pyre" @@ -112,7 +117,29 @@ class PyreIntegrationTest(UnitTest): if __name__ == "__main__": - import sys + """Run this script directly to generate pyre data for test suite (tests/pyre/*.py)""" + print("start pyre server") + stdout: str + stderr: str + return_code: int + os.chdir(TEST_SUITE_PATH) + stdout, stderr, return_code = run_command(["pyre", "start", "--no-watchman"]) + if return_code != 0: + print(stdout) + print(stderr) - print("run `scripts/regenerate-fixtures.py` instead") - sys.exit(1) + for path in TEST_SUITE_PATH.glob("*.py"): + # Pull params into it's own arg to avoid the string escaping in subprocess + params = f"path='{path}'" + cmd = ["pyre", "query", f"types({params})"] + print(cmd) + stdout, stderr, return_code = run_command(cmd) + if return_code != 0: + print(stdout) + print(stderr) + data = json.loads(stdout) + data = data["response"][0] + data = _process_pyre_data(data) + output_path = path.with_suffix(".json") + print(f"write output to {output_path}") + output_path.write_text(json.dumps(data, indent=2)) diff --git a/libcst/tests/test_roundtrip.py b/libcst/tests/test_roundtrip.py deleted file mode 100644 index 96d1e507..00000000 --- a/libcst/tests/test_roundtrip.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. 
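The restored `__main__` block in `test_pyre_integration.py` regenerates the JSON fixtures by querying pyre directly rather than deferring to `scripts/regenerate-fixtures.py`. Condensed, and assuming the fixture directory below and a local `pyre` install, the regeneration loop amounts to:

```python
import json
import os
from pathlib import Path

from libcst.metadata.type_inference_provider import _process_pyre_data, run_command

TEST_SUITE_PATH = Path("libcst/tests/pyre").resolve()  # assumed fixture directory

os.chdir(TEST_SUITE_PATH)
run_command(["pyre", "start", "--no-watchman"])  # requires pyre; tests stop it with `pyre -n stop`

for path in TEST_SUITE_PATH.glob("*.py"):
    stdout, stderr, return_code = run_command(["pyre", "query", f"types(path='{path}')"])
    if return_code != 0:
        print(stderr)
        continue
    data = _process_pyre_data(json.loads(stdout)["response"][0])
    path.with_suffix(".json").write_text(json.dumps(data, indent=2))
```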
- - -from pathlib import Path -from unittest import TestCase - -from libcst import CSTTransformer, parse_module - - -fixtures: Path = Path(__file__).parent.parent.parent / "native/libcst/tests/fixtures" - - -class NOOPTransformer(CSTTransformer): - pass - - -class RoundTripTests(TestCase): - def _get_fixtures(self) -> list[Path]: - self.assertTrue(fixtures.exists(), f"{fixtures} should exist") - files = list(fixtures.iterdir()) - self.assertGreater(len(files), 0) - return files - - def test_clean_roundtrip(self) -> None: - for file in self._get_fixtures(): - with self.subTest(file=str(file)): - src = file.read_text(encoding="utf-8") - mod = parse_module(src) - self.maxDiff = None - self.assertEqual(mod.code, src) - - def test_transform_roundtrip(self) -> None: - transformer = NOOPTransformer() - self.maxDiff = None - for file in self._get_fixtures(): - with self.subTest(file=str(file)): - src = file.read_text(encoding="utf-8") - mod = parse_module(src) - new_mod = mod.visit(transformer) - self.assertEqual(src, new_mod.code) diff --git a/libcst/display/tests/test_dump_text.py b/libcst/tests/test_tool.py similarity index 98% rename from libcst/display/tests/test_dump_text.py rename to libcst/tests/test_tool.py index bbfc6eb0..929d7225 100644 --- a/libcst/display/tests/test_dump_text.py +++ b/libcst/tests/test_tool.py @@ -10,7 +10,7 @@ from libcst.testing.utils import UnitTest from libcst.tool import dump -class CSTDumpTextTest(UnitTest): +class PrettyPrintNodesTest(UnitTest): def test_full_tree(self) -> None: module = r""" Module( @@ -152,10 +152,6 @@ class CSTDumpTextTest(UnitTest): whitespace_before_colon=SimpleWhitespace( value='', ), - type_parameters=None, - whitespace_after_type_parameters=SimpleWhitespace( - value='', - ), ), ], header=[], @@ -247,7 +243,6 @@ class CSTDumpTextTest(UnitTest): ), ), asynchronous=None, - type_parameters=None, ), ], encoding='utf-8', @@ -537,10 +532,6 @@ class CSTDumpTextTest(UnitTest): whitespace_before_colon=SimpleWhitespace( value='', ), - type_parameters=None, - whitespace_after_type_parameters=SimpleWhitespace( - value='', - ), ), ], header=[], @@ -621,7 +612,6 @@ class CSTDumpTextTest(UnitTest): ), ), asynchronous=None, - type_parameters=None, ), ], ) diff --git a/libcst/tests/test_type_enforce.py b/libcst/tests/test_type_enforce.py index f6fecc7d..7c01b82b 100644 --- a/libcst/tests/test_type_enforce.py +++ b/libcst/tests/test_type_enforce.py @@ -11,7 +11,6 @@ from typing import ( Dict, Iterable, List, - Literal, Mapping, MutableMapping, NamedTuple, @@ -24,6 +23,8 @@ from typing import ( Union, ) +from typing_extensions import Literal + from libcst._type_enforce import is_value_of_type from libcst.testing.utils import data_provider, UnitTest diff --git a/libcst/tool.py b/libcst/tool.py index a2164b11..bac61186 100644 --- a/libcst/tool.py +++ b/libcst/tool.py @@ -9,23 +9,28 @@ # python -m libcst.tool print python_file.py import argparse +import dataclasses +import distutils.spawn import importlib import inspect import os import os.path -import shutil import sys import textwrap from abc import ABC, abstractmethod -from typing import Any, Callable, Dict, List, Tuple, Type +from typing import Any, Callable, Dict, List, Sequence, Tuple, Type -try: - import yaml_ft as yaml # pyre-ignore -except ModuleNotFoundError: - import yaml +import yaml -from libcst import CSTLogicError, LIBCST_VERSION, parse_module, PartialParserConfig -from libcst._parser.parso.utils import parse_version_string +from libcst import ( + CSTNode, + IndentedBlock, + LIBCST_VERSION, + 
Module, + parse_module, + PartialParserConfig, +) +from libcst._nodes.deep_equals import deep_equals from libcst.codemod import ( CodemodCommand, CodemodContext, @@ -34,8 +39,187 @@ from libcst.codemod import ( gather_files, parallel_exec_transform_with_prettyprint, ) -from libcst.display import dump, dump_graphviz -from libcst.display.text import _DEFAULT_INDENT + +_DEFAULT_INDENT: str = " " + + +def _node_repr_recursive( # noqa: C901 + node: object, + *, + indent: str = _DEFAULT_INDENT, + show_defaults: bool = False, + show_syntax: bool = False, + show_whitespace: bool = False, +) -> List[str]: + if isinstance(node, CSTNode): + # This is a CSTNode, we must pretty-print it. + tokens: List[str] = [node.__class__.__name__] + fields: Sequence["dataclasses.Field[object]"] = dataclasses.fields(node) + + # Hide all fields prefixed with "_" + fields = [f for f in fields if f.name[0] != "_"] + + # Filter whitespace nodes if needed + if not show_whitespace: + + def _is_whitespace(field: "dataclasses.Field[object]") -> bool: + if "whitespace" in field.name: + return True + if "leading_lines" in field.name: + return True + if "lines_after_decorators" in field.name: + return True + if isinstance(node, (IndentedBlock, Module)) and field.name in [ + "header", + "footer", + ]: + return True + if isinstance(node, IndentedBlock) and field.name == "indent": + return True + return False + + fields = [f for f in fields if not _is_whitespace(f)] + # Filter values which aren't changed from their defaults + if not show_defaults: + + def _get_default(fld: "dataclasses.Field[object]") -> object: + if fld.default_factory is not dataclasses.MISSING: + return fld.default_factory() + return fld.default + + fields = [ + f + for f in fields + if not deep_equals(getattr(node, f.name), _get_default(f)) + ] + # Filter out values which aren't interesting if needed + if not show_syntax: + + def _is_syntax(field: "dataclasses.Field[object]") -> bool: + if isinstance(node, Module) and field.name in [ + "encoding", + "default_indent", + "default_newline", + "has_trailing_newline", + ]: + return True + type_str = repr(field.type) + if ( + "Sentinel" in type_str + and field.name not in ["star_arg", "star", "posonly_ind"] + and "whitespace" not in field.name + ): + # This is a value that can optionally be specified, so its + # definitely syntax. 
+ return True + + for name in ["Semicolon", "Colon", "Comma", "Dot", "AssignEqual"]: + # These are all nodes that exist for separation syntax + if name in type_str: + return True + + return False + + fields = [f for f in fields if not _is_syntax(f)] + + if len(fields) == 0: + tokens.append("()") + else: + tokens.append("(\n") + + for field in fields: + child_tokens: List[str] = [field.name, "="] + value = getattr(node, field.name) + + if isinstance(value, (str, bytes)) or not isinstance(value, Sequence): + # Render out the node contents + child_tokens.extend( + _node_repr_recursive( + value, + indent=indent, + show_whitespace=show_whitespace, + show_defaults=show_defaults, + show_syntax=show_syntax, + ) + ) + elif isinstance(value, Sequence): + # Render out a list of individual nodes + if len(value) > 0: + child_tokens.append("[\n") + list_tokens: List[str] = [] + + last_value = len(value) - 1 + for j, v in enumerate(value): + list_tokens.extend( + _node_repr_recursive( + v, + indent=indent, + show_whitespace=show_whitespace, + show_defaults=show_defaults, + show_syntax=show_syntax, + ) + ) + if j != last_value: + list_tokens.append(",\n") + else: + list_tokens.append(",") + + split_by_line = "".join(list_tokens).split("\n") + child_tokens.append( + "\n".join(f"{indent}{t}" for t in split_by_line) + ) + + child_tokens.append("\n]") + else: + child_tokens.append("[]") + else: + raise Exception("Logic error!") + + # Handle indentation and trailing comma. + split_by_line = "".join(child_tokens).split("\n") + tokens.append("\n".join(f"{indent}{t}" for t in split_by_line)) + tokens.append(",\n") + + tokens.append(")") + + return tokens + else: + # This is a python value, just return the repr + return [repr(node)] + + +def dump( + node: CSTNode, + *, + indent: str = _DEFAULT_INDENT, + show_defaults: bool = False, + show_syntax: bool = False, + show_whitespace: bool = False, +) -> str: + """ + Returns a string representation of the node that contains minimal differences + from the default contruction of the node while also hiding whitespace and + syntax fields. + + Setting ``show_default`` to ``True`` will add fields regardless if their + value is different from the default value. + + Setting ``show_whitespace`` will add whitespace fields and setting + ``show_syntax`` will add syntax fields while respecting the value of + ``show_default``. + + When all keyword args are set to true, the output of this function is + indentical to the __repr__ method of the node. 
+ """ + return "".join( + _node_repr_recursive( + node, + indent=indent, + show_defaults=show_defaults, + show_syntax=show_syntax, + show_whitespace=show_whitespace, + ) + ) def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: @@ -65,11 +249,6 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: action="store_true", help="Show values that exist only for syntax, like commas or semicolons", ) - parser.add_argument( - "--graphviz", - action="store_true", - help="Displays the graph in .dot format, compatible with Graphviz", - ) parser.add_argument( "--indent-string", default=_DEFAULT_INDENT, @@ -104,25 +283,15 @@ def _print_tree_impl(proc_name: str, command_args: List[str]) -> int: else PartialParserConfig() ), ) - if not args.graphviz: - print( - dump( - tree, - indent=args.indent_string, - show_defaults=args.show_defaults, - show_syntax=args.show_syntax, - show_whitespace=args.show_whitespace, - ) - ) - else: - print( - dump_graphviz( - tree, - show_defaults=args.show_defaults, - show_syntax=args.show_syntax, - show_whitespace=args.show_whitespace, - ) + print( + dump( + tree, + indent=args.indent_string, + show_defaults=args.show_defaults, + show_syntax=args.show_syntax, + show_whitespace=args.show_whitespace, ) + ) return 0 @@ -194,7 +363,7 @@ def _find_and_load_config(proc_name: str) -> Dict[str, Any]: requires_config = bool(os.environ.get("LIBCST_TOOL_REQUIRE_CONFIG", "")) if requires_config and not found_config: - raise FileNotFoundError( + raise Exception( f"Did not find a {CONFIG_FILE_NAME} in current directory or any " + "parent directory! Perhaps you meant to run this command from a " + "configured subdirectory, or you need to initialize a new project " @@ -203,7 +372,10 @@ def _find_and_load_config(proc_name: str) -> Dict[str, Any]: # Make sure that the formatter is findable. if config["formatter"]: - exe = shutil.which(config["formatter"][0]) or config["formatter"][0] + exe = ( + distutils.spawn.find_executable(config["formatter"][0]) + or config["formatter"][0] + ) config["formatter"] = [os.path.abspath(exe), *config["formatter"][1:]] return config @@ -218,49 +390,38 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 # full parser below once we know the command and have added its arguments. parser = argparse.ArgumentParser(add_help=False, fromfile_prefix_chars="@") parser.add_argument("command", metavar="COMMAND", type=str, nargs="?", default=None) - ext_action = parser.add_argument( - "-x", - "--external", - action="store_true", - default=False, - help="Interpret `command` as just a module/class specifier", - ) args, _ = parser.parse_known_args(command_args) # Now, try to load the class and get its arguments for help purposes. if args.command is not None: - command_module_name, _, command_class_name = args.command.rpartition(".") - if not (command_module_name and command_class_name): + command_path = args.command.split(".") + if len(command_path) < 2: print(f"{args.command} is not a valid codemod command", file=sys.stderr) return 1 - if args.external: - # There's no error handling here on purpose; if the user opted in for `-x`, - # they'll probably want to see the exact import error too. 
- command_class = getattr( - importlib.import_module(command_module_name), - command_class_name, - ) - else: - command_class = None - for module in config["modules"]: - try: - command_class = getattr( - importlib.import_module(f"{module}.{command_module_name}"), - command_class_name, - ) - break - # Only swallow known import errors, show the rest of the exceptions - # to the user who is trying to run the codemod. - except AttributeError: - continue - except ModuleNotFoundError: - continue - if command_class is None: - print( - f"Could not find {command_module_name} in any configured modules", - file=sys.stderr, + command_module_name, command_class_name = ( + ".".join(command_path[:-1]), + command_path[-1], + ) + command_class = None + for module in config["modules"]: + try: + command_class = getattr( + importlib.import_module(f"{module}.{command_module_name}"), + command_class_name, ) - return 1 + break + # Only swallow known import errors, show the rest of the exceptions + # to the user who is trying to run the codemod. + except AttributeError: + continue + except ModuleNotFoundError: + continue + if command_class is None: + print( + f"Could not find {command_module_name} in any configured modules", + file=sys.stderr, + ) + return 1 else: # Dummy, specifically to allow for running --help with no arguments. command_class = CodemodCommand @@ -275,7 +436,6 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 prog=f"{proc_name} codemod", fromfile_prefix_chars="@", ) - parser._add_action(ext_action) parser.add_argument( "command", metavar="COMMAND", @@ -361,45 +521,33 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 k: v for k, v in vars(args).items() if k - not in { + not in [ "command", - "external", - "hide_blacklisted_warnings", - "hide_generated_warnings", - "hide_progress", + "path", + "unified_diff", + "jobs", + "python_version", "include_generated", "include_stubs", - "jobs", "no_format", - "path", - "python_version", "show_successes", - "unified_diff", - } + "hide_generated_warnings", + "hide_blacklisted_warnings", + "hide_progress", + ] } - # Sepcify target version for black formatter - if any(config["formatter"]) and os.path.basename(config["formatter"][0]) in ( - "black", - "black.exe", - ): - parsed_version = parse_version_string(args.python_version) - - config["formatter"] = [ - config["formatter"][0], - "--target-version", - f"py{parsed_version.major}{parsed_version.minor}", - ] + config["formatter"][1:] + command_instance = command_class(CodemodContext(), **codemod_args) # Special case for allowing stdin/stdout. Note that this does not allow for # full-repo metadata since there is no path. 
if any(p == "-" for p in args.path): if len(args.path) > 1: - raise ValueError("Cannot specify multiple paths when reading from stdin!") + raise Exception("Cannot specify multiple paths when reading from stdin!") print("Codemodding from stdin", file=sys.stderr) oldcode = sys.stdin.read() newcode = exec_transform_with_prettyprint( - command_class(CodemodContext(), **codemod_args), # type: ignore + command_instance, oldcode, include_generated=args.include_generated, generated_code_marker=config["generated_code_marker"], @@ -422,7 +570,7 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 files = gather_files(args.path, include_stubs=args.include_stubs) try: result = parallel_exec_transform_with_prettyprint( - command_class, + command_instance, files, jobs=args.jobs, unified_diff=args.unified_diff, @@ -437,7 +585,6 @@ def _codemod_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 blacklist_patterns=config["blacklist_patterns"], python_version=args.python_version, repo_root=config["repo_root"], - codemod_args=codemod_args, ) except KeyboardInterrupt: print("Interrupted!", file=sys.stderr) @@ -466,7 +613,8 @@ class _SerializerBase(ABC): return f"{comments}{os.linesep}{self._serialize_impl(key, value)}{os.linesep}" @abstractmethod - def _serialize_impl(self, key: str, value: object) -> str: ... + def _serialize_impl(self, key: str, value: object) -> str: + ... class _StrSerializer(_SerializerBase): @@ -481,7 +629,7 @@ class _ListSerializer(_SerializerBase): def _serialize_impl(self, key: str, value: object) -> str: if not isinstance(value, list): - raise ValueError("Can only serialize lists!") + raise Exception("Can only serialize lists!") if self.newlines: values = [f"- {v!r}" for v in value] return f"{key}:{os.linesep}{os.linesep.join(values)}" @@ -542,7 +690,7 @@ def _initialize_impl(proc_name: str, command_args: List[str]) -> int: # For safety, verify that it parses to the identical file. actual_config = yaml.safe_load(config_str) if actual_config != default_config: - raise CSTLogicError("Logic error, serialization is invalid!") + raise Exception("Logic error, serialization is invalid!") config_file = os.path.abspath(os.path.join(args.path, CONFIG_FILE_NAME)) with open(config_file, "w") as fp: @@ -611,8 +759,6 @@ def _list_impl(proc_name: str, command_args: List[str]) -> int: # noqa: C901 continue # Grab the path, try to import all of the files inside of it. - # pyre-fixme[6]: For 1st argument expected `PathLike[Variable[AnyStr <: - # [str, bytes]]]` but got `Optional[str]`. 
path = os.path.dirname(os.path.abspath(imported_module.__file__)) for name, imported_module in _recursive_find(path, module): for objname in dir(imported_module): diff --git a/native/Cargo.lock b/native/Cargo.lock index 16ffd999..fbeaeb78 100644 --- a/native/Cargo.lock +++ b/native/Cargo.lock @@ -4,52 +4,68 @@ version = 3 [[package]] name = "aho-corasick" -version = "1.0.4" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ "memchr", ] [[package]] -name = "anes" -version = "0.1.6" +name = "annotate-snippets" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" +checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7" [[package]] -name = "annotate-snippets" -version = "0.11.5" +name = "atty" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "anstyle", - "unicode-width", + "hermit-abi", + "libc", + "winapi", ] [[package]] -name = "anstyle" -version = "1.0.10" +name = "autocfg" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] -name = "autocfg" -version = "1.1.0" +name = "bitflags" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" + +[[package]] +name = "bstr" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90682c8d613ad3373e66de8c6411e0ae2ab2571e879d2efbf73558cc66f21279" +dependencies = [ + "lazy_static", + "memchr", + "regex-automata", + "serde", +] [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631" [[package]] name = "cast" -version = "0.3.0" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a" +dependencies = [ + "rustc_version", +] [[package]] name = "cfg-if" @@ -58,95 +74,85 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] -name = "ciborium" -version = "0.2.0" +name = "chic" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" +checksum = "a5b5db619f3556839cb2223ae86ff3f9a09da2c5013be42bc9af08c9589bf70c" dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" - -[[package]] -name = "ciborium-ll" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" -dependencies = [ - "ciborium-io", - "half", + "annotate-snippets", ] [[package]] name = "clap" -version = "4.5.38" +version = "2.33.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000" +checksum = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002" dependencies = [ - "clap_builder", + "bitflags", + "textwrap", + "unicode-width", ] -[[package]] -name = "clap_builder" -version = "4.5.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120" -dependencies = [ - "anstyle", - "clap_lex", -] - -[[package]] -name = "clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - [[package]] name = "criterion" -version = "0.6.0" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679" +checksum = "ab327ed7354547cc2ef43cbe20ef68b988e70b4b593cbd66a2a61733123a3d23" dependencies = [ - "anes", + "atty", "cast", - "ciborium", "clap", "criterion-plot", - "itertools 0.13.0", + "csv", + "itertools 0.10.1", + "lazy_static", "num-traits", "oorandom", "plotters", "rayon", "regex", "serde", + "serde_cbor", + "serde_derive", "serde_json", "tinytemplate", "walkdir", ] [[package]] -name = "criterion-plot" -version = "0.5.0" +name = "criterion-cycles-per-byte" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +checksum = "8d34485a578330c7a91ccf064674f3739a7aebbf3b9d7fd498a6d3e8f7473c96" +dependencies = [ + "criterion", +] + +[[package]] +name = "criterion-plot" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e022feadec601fba1649cfa83586381a4ad31c6bf3a9ab7d408118b05dd9889d" dependencies = [ "cast", - "itertools 0.10.5", + "itertools 0.9.0", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" +dependencies = [ + "cfg-if", + "crossbeam-utils", ] [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -155,26 +161,47 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.9" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" +checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" dependencies = [ - "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.6.5", - "once_cell", + "lazy_static", + "memoffset", "scopeguard", ] 
[[package]] name = "crossbeam-utils" -version = "0.8.10" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" +checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ "cfg-if", - "once_cell", + "lazy_static", +] + +[[package]] +name = "csv" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" +dependencies = [ + "bstr", + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", ] [[package]] @@ -189,175 +216,181 @@ version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "glob" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" - [[package]] name = "half" -version = "1.8.2" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" [[package]] -name = "hashbrown" -version = "0.14.5" +name = "hermit-abi" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "indexmap" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ - "equivalent", - "hashbrown", + "libc", ] [[package]] name = "indoc" -version = "2.0.4" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", + "unindent", +] + +[[package]] +name = "instant" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee0328b1209d157ef001c94dd85b4f8f64139adb0eac2659f4b08382b2f474d" +dependencies = [ + "cfg-if", +] [[package]] name = "itertools" -version = "0.10.5" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" dependencies = [ "either", ] [[package]] name = "itertools" -version = "0.13.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" dependencies = [ "either", ] [[package]] name = "itoa" -version = "1.0.2" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "83bdfbace3a0e81a4253f73b49e960b053e396a11012cbd49b9b74d6a2b67062" dependencies = [ - "once_cell", "wasm-bindgen", ] [[package]] -name = "libc" -version = "0.2.149" +name = "lazy_static" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320cfe77175da3a483efed4bc0adc1968ca050b098ce4f2f1c13a56626128790" [[package]] name = "libcst" -version = "1.8.6" +version = "0.1.0" dependencies = [ - "annotate-snippets", + "chic", "criterion", + "criterion-cycles-per-byte", "difference", - "itertools 0.14.0", + "itertools 0.10.1", "libcst_derive", - "memchr", - "paste", + "once_cell", + "paste 1.0.5", "peg", "pyo3", - "rayon", "regex", "thiserror", ] [[package]] name = "libcst_derive" -version = "1.8.6" +version = "0.1.0" dependencies = [ "quote", "syn", - "trybuild", +] + +[[package]] +name = "lock_api" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb" +dependencies = [ + "scopeguard", ] [[package]] name = "log" -version = "0.4.17" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" dependencies = [ "cfg-if", ] [[package]] name = "memchr" -version = "2.7.4" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc" [[package]] name = "memoffset" -version = "0.6.5" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" dependencies = [ "autocfg", ] [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ "autocfg", ] [[package]] -name = "once_cell" -version = "1.21.3" +name = "num_cpus" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" [[package]] name = "oorandom" @@ -366,16 +399,59 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] -name = "paste" -version = "1.0.15" +name = "parking_lot" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7a782938e745763fe6907fc6ba86946d72f49fe7e21de074e08128a99fb018" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", +] [[package]] name = "peg" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9928cfca101b36ec5163e70049ee5368a8a1c3c6efc9ca9c5f9cc2f816152477" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" dependencies = [ "peg-macros", "peg-runtime", @@ -383,9 +459,8 @@ dependencies = [ [[package]] name = "peg-macros" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6298ab04c202fa5b5d52ba03269fb7b74550b150323038878fe6c372d8280f71" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" dependencies = [ "peg-runtime", "proc-macro2", @@ -394,15 +469,14 @@ dependencies = [ [[package]] name = "peg-runtime" 
-version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "132dca9b868d927b35b5dd728167b2dee150eb1ad686008fc71ccb298b776fca" +version = "0.7.0" +source = "git+https://github.com/kevinmehall/rust-peg?rev=4b146b4b78a80c07e43d7ace2d97f65bfde279a8#4b146b4b78a80c07e43d7ace2d97f65bfde279a8" [[package]] name = "plotters" -version = "0.3.7" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" dependencies = [ "num-traits", "plotters-backend", @@ -413,77 +487,65 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.7" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" +checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" [[package]] name = "plotters-svg" -version = "0.3.7" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" dependencies = [ "plotters-backend", ] [[package]] -name = "portable-atomic" -version = "1.4.3" +name = "proc-macro-hack" +version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612" dependencies = [ - "unicode-ident", + "unicode-xid", ] [[package]] name = "pyo3" -version = "0.26.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383" +checksum = "35100f9347670a566a67aa623369293703322bb9db77d99d7df7313b575ae0c8" dependencies = [ + "cfg-if", "indoc", "libc", - "memoffset 0.9.0", - "once_cell", - "portable-atomic", + "parking_lot", + "paste 0.1.18", "pyo3-build-config", - "pyo3-ffi", "pyo3-macros", "unindent", ] [[package]] name = "pyo3-build-config" -version = "0.26.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f" +checksum = "d12961738cacbd7f91b7c43bc25cfeeaa2698ad07a04b3be0aa88b950865738f" dependencies = [ - "target-lexicon", -] - -[[package]] -name = "pyo3-ffi" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105" -dependencies = [ - "libc", - "pyo3-build-config", + "once_cell", ] [[package]] name = "pyo3-macros" -version = "0.26.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded" +checksum = "fc0bc5215d704824dfddddc03f93cb572e1155c68b6761c37005e1c288808ea8" dependencies = [ - "proc-macro2", "pyo3-macros-backend", "quote", "syn", @@ -491,11 +553,10 @@ dependencies = [ [[package]] name = 
"pyo3-macros-backend" -version = "0.26.0" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf" +checksum = "71623fc593224afaab918aa3afcaf86ed2f43d34f6afde7f3922608f253240df" dependencies = [ - "heck", "proc-macro2", "pyo3-build-config", "quote", @@ -504,73 +565,84 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.40" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" dependencies = [ "proc-macro2", ] [[package]] name = "rayon" -version = "1.11.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" dependencies = [ + "autocfg", + "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.13.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" dependencies = [ + "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", + "lazy_static", + "num_cpus", +] + +[[package]] +name = "redox_syscall" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee" +dependencies = [ + "bitflags", ] [[package]] name = "regex" -version = "1.11.2" +version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" dependencies = [ "aho-corasick", "memchr", - "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.6.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] -name = "rustversion" -version = "1.0.21" +name = "rustc_version" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" [[package]] name = "same-file" @@ -588,19 +660,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] -name = "serde" -version = "1.0.208" +name = "semver" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" +checksum = "5f3aac57ee7f3272d8395c6e4f502f434f0e289fcd62876f70daa008c20dcabe" + +[[package]] +name = "serde" +version = "1.0.126" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" + +[[package]] +name = "serde_cbor" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e18acfa2f90e8b735b2836ab8d538de304cbb6729a7360729ea5a895d15a622" dependencies = [ - "serde_derive", + "half", + "serde", ] [[package]] name = "serde_derive" -version = "1.0.208" +version = "1.0.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" +checksum = "963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43" dependencies = [ "proc-macro2", "quote", @@ -609,71 +694,55 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.125" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" +checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" dependencies = [ "itoa", - "memchr", "ryu", "serde", ] [[package]] -name = "serde_spanned" -version = "0.6.7" +name = "smallvec" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" -dependencies = [ - "serde", -] +checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" [[package]] name = "syn" -version = "2.0.101" +version = "1.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "1873d832550d4588c3dbc20f01361ab00bfe741048f71e3fecf145a7cc18b29c" dependencies = [ "proc-macro2", "quote", - "unicode-ident", + "unicode-xid", ] [[package]] -name = "target-lexicon" -version = "0.13.2" +name = "textwrap" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" - -[[package]] -name = "target-triple" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ac9aa371f599d22256307c24a9d748c041e548cbf599f35d890f9d365361790" - -[[package]] -name = "termcolor" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ - "winapi-util", + "unicode-width", ] [[package]] name = "thiserror" -version = "2.0.12" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" dependencies = [ "proc-macro2", "quote", @@ -690,72 +759,23 @@ dependencies = [ "serde_json", ] -[[package]] -name = "toml" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.22.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" -dependencies = [ - "indexmap", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - -[[package]] -name = "trybuild" -version = "1.0.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c9bf9513a2f4aeef5fdac8677d7d349c79fdbcc03b9c86da6e9d254f1e43be2" -dependencies = [ - "glob", - "serde", - "serde_derive", - "serde_json", - "target-triple", - "termcolor", - "toml", -] - -[[package]] -name = "unicode-ident" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" - [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" [[package]] name = "unindent" -version = "0.2.3" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" +checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" [[package]] name = "walkdir" @@ -770,23 +790,22 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "d54ee1d4ed486f78874278e63e4069fc1ab9f6a18ca492076ffb90c5eb2997fd" dependencies = [ "cfg-if", - "once_cell", - "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.100" +version = "0.2.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +checksum = "3b33f6a0694ccfea53d94db8b2ed1c3a8a4c86dd936b13b9f0a15ec4a451b900" dependencies = [ "bumpalo", + "lazy_static", "log", "proc-macro2", "quote", @@ -796,9 +815,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "088169ca61430fe1e58b8096c24975251700e7b1f6fd91cc9d59b04fb9b18bd4" dependencies = [ 
"quote", "wasm-bindgen-macro-support", @@ -806,9 +825,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "be2241542ff3d9f241f5e2cb6dd09b37efe786df8851c54957683a49f0987a97" dependencies = [ "proc-macro2", "quote", @@ -819,18 +838,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" -dependencies = [ - "unicode-ident", -] +checksum = "d7cff876b8f18eed75a66cf49b65e7f967cb354a7aa16003fb55dbfd25b44b4f" [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "e828417b379f3df7111d3a2a9e5753706cae29c41f7c4029ee9fd77f3e09e582" dependencies = [ "js-sys", "wasm-bindgen", @@ -866,12 +882,3 @@ name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "winnow" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" -dependencies = [ - "memchr", -] diff --git a/native/libcst/Cargo.toml b/native/libcst/Cargo.toml index e4c9f45f..2247abf3 100644 --- a/native/libcst/Cargo.toml +++ b/native/libcst/Cargo.toml @@ -1,20 +1,13 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. [package] name = "libcst" -version = "1.8.6" +version = "0.1.0" authors = ["LibCST Developers"] edition = "2018" -rust-version = "1.70" -description = "A Python parser and Concrete Syntax Tree library." -license = "MIT AND (MIT AND PSF-2.0)" -repository = "https://github.com/Instagram/LibCST" -documentation = "https://libcst.rtfd.org" -keywords = ["python", "cst", "ast"] -categories = ["parser-implementations"] [lib] name = "libcst_native" @@ -30,25 +23,24 @@ path = "src/bin.rs" # # Once https://github.com/PyO3/pyo3/pull/1123 lands, it may be better to use # `-Zextra-link-arg` for this instead. 
-default = ["py"] -py = ["pyo3", "pyo3/extension-module"] +default = ["pyo3/extension-module"] trace = ["peg/trace"] [dependencies] -paste = "1.0.15" -pyo3 = { version = "0.26", optional = true } -thiserror = "2.0.12" -peg = "0.8.5" -annotate-snippets = "0.11.5" -regex = "1.11.2" -memchr = "2.7.4" -libcst_derive = { path = "../libcst_derive", version = "1.8.6" } +paste = "1.0.4" +pyo3 = "0.14.4" +thiserror = "1.0.23" +peg = { git = "https://github.com/kevinmehall/rust-peg", rev = "4b146b4b78a80c07e43d7ace2d97f65bfde279a8" } +chic = "1.2.2" +itertools = "0.10.0" +once_cell = "1.5.2" +regex = "1.5.4" +libcst_derive = { path = "../libcst_derive" } [dev-dependencies] -criterion = { version = "0.6.0", features = ["html_reports"] } +criterion = { version = "0.3.4", features = ["html_reports"] } +criterion-cycles-per-byte = "0.1" difference = "2.0.0" -rayon = "1.11.0" -itertools = "0.14.0" [[bench]] name = "parser_benchmark" diff --git a/native/libcst/LICENSE b/native/libcst/LICENSE deleted file mode 100644 index 5594616f..00000000 --- a/native/libcst/LICENSE +++ /dev/null @@ -1,102 +0,0 @@ -All contributions towards LibCST are MIT licensed. - -Some Python files have been derived from the standard library and are therefore -PSF licensed. Modifications on these files are dual licensed (both MIT and -PSF). These files are: - -- libcst/_parser/base_parser.py -- libcst/_parser/parso/utils.py -- libcst/_parser/parso/pgen2/generator.py -- libcst/_parser/parso/pgen2/grammar_parser.py -- libcst/_parser/parso/python/py_token.py -- libcst/_parser/parso/python/tokenize.py -- libcst/_parser/parso/tests/test_fstring.py -- libcst/_parser/parso/tests/test_tokenize.py -- libcst/_parser/parso/tests/test_utils.py -- native/libcst/src/tokenizer/core/mod.rs -- native/libcst/src/tokenizer/core/string_types.rs - -Some Python files have been taken from dataclasses and are therefore Apache -licensed. Modifications on these files are licensed under Apache 2.0 license. -These files are: - -- libcst/_add_slots.py - -------------------------------------------------------------------------------- - -MIT License - -Copyright (c) Meta Platforms, Inc. and affiliates. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -------------------------------------------------------------------------------- - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 - -1. 
This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" -are retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - -------------------------------------------------------------------------------- - -APACHE LICENSE, VERSION 2.0 - -http://www.apache.org/licenses/LICENSE-2.0 diff --git a/native/libcst/README.md b/native/libcst/README.md index 09fe8f99..f33563b2 100644 --- a/native/libcst/README.md +++ b/native/libcst/README.md @@ -1,122 +1,66 @@ -# libcst/native +# libcst_native -A native extension to enable parsing of new Python grammar in LibCST. +A very experimental native extension to speed up LibCST. This does not currently provide +much performance benefit and is therefore not recommended for general use. -The extension is written in Rust, and exposed to Python using [PyO3](https://pyo3.rs/). -This is packaged together with libcst, and can be imported from `libcst.native`. By default -the LibCST APIs use this module for all parsing. +The extension is written in Rust using [PyO3](https://pyo3.rs/). -Later on, the parser library might be packaged separately as -[a Rust crate](https://crates.io). Pull requests towards this are much appreciated. - -## Goals - -1. Adopt the CPython grammar definition as closely as possible to reduce maintenance - burden. 
This means using a PEG parser. -2. Feature-parity with the pure-python LibCST parser: the API should be easy to use from - Python, support parsing with a target version, bytes and strings as inputs, etc. -3. [future] Performance. The aspirational goal is to be within 2x CPython performance, - which would enable LibCST to be used in interactive use cases (think IDEs). -4. [future] Error recovery. The parser should be able to handle partially complete - documents, returning a CST for the syntactically correct parts, and a list of errors - found. - -## Structure - -The extension is organized into two rust crates: `libcst_derive` contains some macros to -facilitate various features of CST nodes, and `libcst` contains the `parser` itself -(including the Python grammar), a `tokenizer` implementation by @bgw, and a very basic -representation of CST `nodes`. Parsing is done by -1. **tokenizing** the input utf-8 string (bytes are not supported at the Rust layer, - they are converted to utf-8 strings by the python wrapper) -2. running the **PEG parser** on the tokenized input, which also captures certain anchor - tokens in the resulting syntax tree -3. using the anchor tokens to **inflate** the syntax tree into a proper CST - -These steps are wrapped into a high-level `parse_module` API -[here](https://github.com/Instagram/LibCST/blob/main/native/libcst/src/lib.rs#L43), -along with `parse_statement` and `parse_expression` functions which all just accept the -input string and an optional encoding. - -These Rust functions are exposed to Python -[here](https://github.com/Instagram/LibCST/blob/main/native/libcst/src/py.rs) using the -excellent [PyO3](https://pyo3.rs/) library, plus an `IntoPy` trait which is mostly -implemented via a macro in `libcst_derive`. +This installs as a separate python package that LibCST looks for and will import if it's +available. -## Hacking +## Using with LibCST -### Nodes -All CST nodes are marked with the `#[cst_node]` proc macro, which duplicates the node types; for a node named `Foo`, there's: +[Set up a rust development environment](https://www.rust-lang.org/tools/install). Using +`rustup` is recommended, but not necessary. Rust 1.45.0+ should work. -- `DeflatedFoo`, which is the output of the parsing phase and isn't exposed through the - API of the crate. - - it has two lifetime parameters: `'r` (or `'input` in the grammar) is the lifetime of - `Token` references, and `'a` is the lifetime of `str` slices from the original input - - `TokenRef` fields are contained here, while whitespace fields aren't - - if there aren't any fields that refer to other CST nodes or `TokenRef`s, there's an - extra (private) `_phantom` field that "contains" the two lifetime parameters (this - is to make the type parameters of all `DeflatedFoo` types uniform) - - it implements the `Inflate` trait, which converts `DeflatedFoo` into `Foo` -- `Foo`, which is what's publicly exposed in the crate and is the output of `Inflate`ing `DeflatedFoo`. 
- - it only retains the second (`'a`) lifetime parameter of `DeflatedFoo` to refer back to slices of the original input string - - whitespace fields are contained here, but `TokenRef`s aren't - - `IntoPy` is implemented for it (assuming the `py` crate feature is enabled), which contains code to translate `Foo` back into a Python object; hence, the fields on `Foo` match the Python CST node implementations (barring fields marked with `#[skip_py]`) +Follow the instructions for setting up a virtualenv in the top-level README, then: -### Grammar +``` +cd libcst_native +maturin develop # install libcst_native to the virtualenv +cd .. # cd back into the main project +python -m unittest +``` -The grammar is mostly a straightforward translation from the [CPython -grammar](https://github.com/python/cpython/blob/main/Grammar/python.gram), with some -exceptions: +This will run the python test suite. Nothing special is required to use `libcst_native`, +since `libcst` will automatically use the native extension when it's installed. -* The output of grammar rules are deflated CST nodes that capture the AST plus - additional anchor token references used for whitespace parsing later on. -* Rules in the grammar must be strongly typed, as enforced by the Rust compiler. The - CPython grammar rules are a bit more loosely-typed in comparison. -* Some features in the CPython peg parser are not supported by rust-peg: keywords, - mutually recursive rules, special `invalid_` rules, the `~` operator, terminating the - parser early. +When benchmarking this code, make sure to run `maturin develop` with the `--release` +flag to enable compiler optimizations. -The PEG parser is run on a `Vec` of `Token`s (more precisely `&'input Vec>`), -and tries its best to avoid allocating any strings, working only with references. As -such, the output nodes don't own any strings, but refer to slices of the original input -(hence the `'input, 'a` lifetime parameters on almost all nodes). +You can disable the native extension by uninstalling the package from your virtualenv: -### Whitespace parsing +``` +pip uninstall libcst_native +``` -The `Inflate` trait is responsible for taking a "deflated", skeleton CST node, and -parsing out the relevant whitespace from the anchor tokens to produce an "inflated" -(normal) CST node. In addition to the deflated node, inflation requires a whitespace -config object which contains global information required for certain aspects of -whitespace parsing, like the default indentation. -Inflation consumes the deflated node, while mutating the tokens referenced by it. This -is important to make sure whitespace is only ever assigned to at most one CST node. The -`Inflate` trait implementation needs to ensure that all whitespace is assigned to a CST -node; this is generally verified using roundtrip tests (i.e. parsing code and then -generating it back to then assert the original and generated are byte-by-byte equal). - -The general convention is that the top-most possible node owns a certain piece of -whitespace, which should be straightforward to achieve in a top-down parser like -`Inflate`. In cases where whitespace is shared between sibling nodes, usually the -leftmost node owns the whitespace except in the case of trailing commas and closing -parentheses, where the latter owns the whitespace (for backwards compatibility with the -pure python parser). See the implementation of `inflate_element` for how this is done. 
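The round-trip property described above is also the user-visible contract on the Python side: parsing source and regenerating code from the resulting tree reproduces the input byte for byte. A small sketch, assuming the standard `libcst` public API:

```
import libcst as cst

source = "x = ( 1 , 2 )  # unusual spacing is preserved\n"
module = cst.parse_module(source)
assert module.code == source  # parse -> codegen round-trips byte-for-byte
```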
- -### Tests +## Rust Tests In addition to running the python test suite, you can run some tests written in rust with ``` -cd native -cargo test +cargo test --no-default-features ``` -These include unit and roundtrip tests. +The `--no-default-features` flag needed to work around an incompatibility between tests +and pyo3's `extension-module` feature. -Additionally, some benchmarks can be run on x86-based architectures using `cargo bench`. -### Code Formatting +## Code Formatting Use `cargo fmt` to format your code. + + +## Release + +This isn't currently supported, so there's no releases available, but the end-goal would +be to publish this on PyPI. + +Because this is a native extension, it must be re-built for each platform/architecture. +The per-platform build could be automated using a CI system, [like github +actions][gh-actions]. + +[gh-actions]: https://github.com/PyO3/maturin/blob/master/.github/workflows/release.yml diff --git a/native/libcst/benches/parser_benchmark.rs b/native/libcst/benches/parser_benchmark.rs index 4987022a..7a12ebb3 100644 --- a/native/libcst/benches/parser_benchmark.rs +++ b/native/libcst/benches/parser_benchmark.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree @@ -9,22 +9,15 @@ use std::{ }; use criterion::{ - black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, BenchmarkId, - Criterion, Throughput, + black_box, criterion_group, criterion_main, measurement::Measurement, BatchSize, Criterion, }; +use criterion_cycles_per_byte::CyclesPerByte; use itertools::Itertools; -use rayon::prelude::*; - use libcst_native::{ parse_module, parse_tokens_without_whitespace, tokenize, Codegen, Config, Inflate, }; -#[cfg(not(windows))] -const NEWLINE: &str = "\n"; -#[cfg(windows)] -const NEWLINE: &str = "\r\n"; - -fn load_all_fixtures_vec() -> Vec { +fn load_all_fixtures() -> String { let mut path = PathBuf::from(file!()); path.pop(); path.pop(); @@ -45,23 +38,18 @@ fn load_all_fixtures_vec() -> Vec { let path = file.unwrap().path(); std::fs::read_to_string(&path).expect("reading_file") }) - .collect() -} - -fn load_all_fixtures() -> String { - load_all_fixtures_vec().join(NEWLINE) + .join("\n") } pub fn inflate_benchmarks(c: &mut Criterion) { let fixture = load_all_fixtures(); let tokens = tokenize(fixture.as_str()).expect("tokenize failed"); - let tokvec = tokens.clone().into(); let mut group = c.benchmark_group("inflate"); group.bench_function("all", |b| { b.iter_batched( || { let conf = Config::new(fixture.as_str(), &tokens); - let m = parse_tokens_without_whitespace(&tokvec, fixture.as_str(), None) + let m = parse_tokens_without_whitespace(tokens.clone(), fixture.as_str(), None) .expect("parse failed"); (conf, m) }, @@ -78,13 +66,13 @@ pub fn parser_benchmarks(c: &mut Criterion) { group.measurement_time(Duration::from_secs(15)); group.bench_function("all", |b| { b.iter_batched( - || tokenize(fixture.as_str()).expect("tokenize failed").into(), + || tokenize(fixture.as_str()).expect("tokenize failed"), |tokens| { - black_box(drop(parse_tokens_without_whitespace( - &tokens, + black_box(parse_tokens_without_whitespace( + tokens, fixture.as_str(), None, - ))) + )) }, BatchSize::SmallInput, ) @@ -94,7 +82,7 @@ pub fn parser_benchmarks(c: &mut Criterion) { pub fn codegen_benchmarks(c: &mut Criterion) { let input = load_all_fixtures(); - let m 
+    let m = parse_module(&input, None).expect("parse failed");
     let mut group = c.benchmark_group("codegen");
     group.bench_function("all", |b| {
         b.iter(|| {
@@ -114,57 +102,9 @@ pub fn tokenize_benchmarks<T: Measurement>(c: &mut Criterion<T>) {
     group.finish();
 }
 
-pub fn parse_into_cst_benchmarks<T: Measurement>(c: &mut Criterion<T>) {
-    let fixture = load_all_fixtures();
-    let mut group = c.benchmark_group("parse_into_cst");
-    group.measurement_time(Duration::from_secs(15));
-    group.bench_function("all", |b| {
-        b.iter(|| black_box(parse_module(&fixture, None)))
-    });
-    group.finish();
-}
-
-pub fn parse_into_cst_multithreaded_benchmarks<T: Measurement>(
-    c: &mut Criterion<T>,
-) where
-    <T as Measurement>::Value: Send,
-{
-    let fixtures = load_all_fixtures_vec();
-    let mut group = c.benchmark_group("parse_into_cst_parallel");
-    group.measurement_time(Duration::from_secs(15));
-    group.warm_up_time(Duration::from_secs(5));
-
-    for thread_count in 1..10 {
-        let expanded_fixtures = (0..thread_count)
-            .flat_map(|_| fixtures.clone())
-            .collect_vec();
-        group.throughput(Throughput::Elements(expanded_fixtures.len() as u64));
-        group.bench_with_input(
-            BenchmarkId::from_parameter(thread_count),
-            &thread_count,
-            |b, thread_count| {
-                let thread_pool = rayon::ThreadPoolBuilder::new()
-                    .num_threads(*thread_count)
-                    .build()
-                    .unwrap();
-                thread_pool.install(|| {
-                    b.iter_with_large_drop(|| {
-                        expanded_fixtures
-                            .par_iter()
-                            .map(|contents| black_box(parse_module(&contents, None)))
-                            .collect::<Vec<_>>()
-                    });
-                });
-            },
-        );
-    }
-
-    group.finish();
-}
-
 criterion_group!(
     name=benches;
-    config=Criterion::default();
-    targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks, parse_into_cst_benchmarks, parse_into_cst_multithreaded_benchmarks
+    config = Criterion::default().with_measurement(CyclesPerByte);
+    targets=parser_benchmarks, codegen_benchmarks, inflate_benchmarks, tokenize_benchmarks
 );
 criterion_main!(benches);
diff --git a/native/libcst/src/bin.rs b/native/libcst/src/bin.rs
index 1517cce4..1b42859a 100644
--- a/native/libcst/src/bin.rs
+++ b/native/libcst/src/bin.rs
@@ -1,4 +1,4 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree
diff --git a/native/libcst/src/lib.rs b/native/libcst/src/lib.rs
index 113d5d02..e4355997 100644
--- a/native/libcst/src/lib.rs
+++ b/native/libcst/src/lib.rs
@@ -1,23 +1,21 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree.
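
The lib.rs hunks here change `parse_tokens_without_whitespace` to take the token vector by value, while `parse_module` stays the one-call entry point that tokenizes, parses, and then inflates whitespace. As an orientation, a minimal caller sketch follows; it is an illustration only, not part of the patch, and assumes nothing beyond the `parse_module` and `prettify_error` signatures shown in these hunks.

```
// Sketch of a caller against the public API shown in the lib.rs hunks (assumed usage,
// not code from this patch).
use libcst_native::{parse_module, prettify_error};

fn check_syntax(source: &str) -> Result<(), String> {
    // parse_module tokenizes, parses, and inflates whitespace in one call.
    parse_module(source, None)
        // A real caller would walk or re-generate the returned Module CST here.
        .map(|_module| ())
        // prettify_error renders the ParserError with a snippet of the offending source.
        .map_err(|err| prettify_error(err, "failed to parse module"))
}
```
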
 
 use std::cmp::{max, min};
 
-pub mod tokenizer;
+mod tokenizer;
 pub use tokenizer::whitespace_parser::Config;
 use tokenizer::{whitespace_parser, TokConfig, Token, TokenIterator};
 
 mod nodes;
-use nodes::deflated::Module as DeflatedModule;
 pub use nodes::*;
 
 mod parser;
-use parser::{ParserError, Result, TokVec};
+use parser::{ParserError, Result};
 
-#[cfg(feature = "py")]
 pub mod py;
 
 pub fn tokenize(text: &str) -> Result<Vec<Token>> {
@@ -25,7 +23,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>> {
         text,
         &TokConfig {
             async_hacks: false,
-            split_ftstring: true,
+            split_fstring: true,
         },
     );
 
@@ -33,6 +31,15 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>> {
         .map_err(|err| ParserError::TokenizerError(err, text))
 }
 
+pub fn parse_tokens_without_whitespace<'a>(
+    tokens: Vec<Token<'a>>,
+    module_text: &'a str,
+    encoding: Option<&str>,
+) -> Result<'a, Module<'a>> {
+    parser::python::file(&tokens.into(), module_text, encoding)
+        .map_err(|err| ParserError::ParserError(err, module_text))
+}
+
 pub fn parse_module<'a>(
     mut module_text: &'a str,
     encoding: Option<&str>,
@@ -43,26 +50,14 @@ pub fn parse_module<'a>(
     }
     let tokens = tokenize(module_text)?;
     let conf = whitespace_parser::Config::new(module_text, &tokens);
-    let tokvec = tokens.into();
-    let m = parse_tokens_without_whitespace(&tokvec, module_text, encoding)?;
+    let m = parse_tokens_without_whitespace(tokens, module_text, encoding)?;
     Ok(m.inflate(&conf)?)
 }
 
-pub fn parse_tokens_without_whitespace<'r, 'a>(
-    tokens: &'r TokVec<'a>,
-    module_text: &'a str,
-    encoding: Option<&str>,
-) -> Result<'a, DeflatedModule<'r, 'a>> {
-    let m = parser::python::file(tokens, module_text, encoding)
-        .map_err(|err| ParserError::ParserError(err, module_text))?;
-    Ok(m)
-}
-
 pub fn parse_statement(text: &str) -> Result<Statement> {
     let tokens = tokenize(text)?;
     let conf = whitespace_parser::Config::new(text, &tokens);
-    let tokvec = tokens.into();
-    let stm = parser::python::statement_input(&tokvec, text)
+    let stm = parser::python::statement_input(&tokens.into(), text)
         .map_err(|err| ParserError::ParserError(err, text))?;
     Ok(stm.inflate(&conf)?)
 }
@@ -70,8 +65,7 @@ pub fn parse_statement(text: &str) -> Result<Statement> {
 pub fn parse_expression(text: &str) -> Result<Expression> {
     let tokens = tokenize(text)?;
     let conf = whitespace_parser::Config::new(text, &tokens);
-    let tokvec = tokens.into();
-    let expr = parser::python::expression_input(&tokvec, text)
+    let expr = parser::python::expression_input(&tokens.into(), text)
         .map_err(|err| ParserError::ParserError(err, text))?;
     Ok(expr.inflate(&conf)?)
 }
@@ -91,37 +85,32 @@ fn bol_offset(source: &str, n: i32) -> usize {
 pub fn prettify_error(err: ParserError, label: &str) -> std::string::String {
     match err {
         ParserError::ParserError(e, module_text) => {
-            use annotate_snippets::{Level, Renderer, Snippet};
-
             let loc = e.location;
             let context = 1;
-            let line_start = max(
-                1,
-                loc.start_pos
-                    .line
-                    .checked_sub(context as usize)
-                    .unwrap_or(1),
-            );
             let start_offset = bol_offset(module_text, loc.start_pos.line as i32 - context);
             let end_offset = bol_offset(module_text, loc.end_pos.line as i32 + context + 1);
             let source = &module_text[start_offset..end_offset];
             let start = loc.start_pos.offset - start_offset;
             let end = loc.end_pos.offset - start_offset;
-            let end = if start == end {
-                min(end + 1, end_offset - start_offset + 1)
-            } else {
-                end
-            };
-            Renderer::styled()
-                .render(
-                    Level::Error.title(label).snippet(
-                        Snippet::source(source)
-                            .line_start(line_start)
-                            .fold(false)
-                            .annotations(vec![Level::Error.span(start..end).label(&format!(
-                                "expected {} {} -> {}",
-                                e.expected, loc.start_pos, loc.end_pos
-                            ))]),
+            chic::Error::new(label)
+                .error(
+                    max(
+                        1,
+                        loc.start_pos
+                            .line
+                            .checked_sub(context as usize)
+                            .unwrap_or(1),
+                    ),
+                    start,
+                    if start == end {
+                        min(end + 1, end_offset - start_offset + 1)
+                    } else {
+                        end
+                    },
+                    source,
+                    format!(
+                        "expected {} {} -> {}",
+                        e.expected, loc.start_pos, loc.end_pos
                     ),
                 )
                 .to_string()
@@ -154,22 +143,6 @@ mod test {
         parse_module("def g(a, b): ...", None).expect("parse error");
     }
 
-    #[test]
-    fn test_single_statement_with_no_newline() {
-        for src in &[
-            "(\n \\\n)",
-            "(\n \\\n)",
-            "(\n '''\n''')",
-            "del _",
-            "if _:\n '''\n)'''",
-            "if _:\n ('''\n''')",
-            "if _:\n '''\n '''",
-            "if _:\n '''\n ''' ",
-        ] {
-            parse_module(src, None).unwrap_or_else(|e| panic!("'{}' doesn't parse: {}", src, e));
-        }
-    }
-
     #[test]
     fn bol_offset_first_line() {
         assert_eq!(0, bol_offset("hello", 1));
@@ -191,23 +164,4 @@ mod test {
         assert_eq!(11, bol_offset("hello\nhello", 3));
         assert_eq!(12, bol_offset("hello\nhello\nhello", 3));
     }
-    #[test]
-    fn test_tstring_basic() {
-        assert!(
-            parse_module("t'hello'", None).is_ok(),
-            "Failed to parse t'hello'"
-        );
-        assert!(
-            parse_module("t'{hello}'", None).is_ok(),
-            "Failed to parse t'{{hello}}'"
-        );
-        assert!(
-            parse_module("t'{hello:r}'", None).is_ok(),
-            "Failed to parse t'{{hello:r}}'"
-        );
-        assert!(
-            parse_module("f'line1\\n{hello:r}\\nline2'", None).is_ok(),
-            "Failed to parse t'line1\\n{{hello:r}}\\nline2'"
-        );
-    }
 }
diff --git a/native/libcst/src/nodes/codegen.rs b/native/libcst/src/nodes/codegen.rs
index 1ebf8d80..99cc377a 100644
--- a/native/libcst/src/nodes/codegen.rs
+++ b/native/libcst/src/nodes/codegen.rs
@@ -1,4 +1,4 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree.
diff --git a/native/libcst/src/nodes/expression.rs b/native/libcst/src/nodes/expression.rs
index c72d301d..c55e327d 100644
--- a/native/libcst/src/nodes/expression.rs
+++ b/native/libcst/src/nodes/expression.rs
@@ -1,16 +1,14 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree.
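
The expression.rs hunks that follow repeat one structural change across every node type: the removed lines implement `Inflate` as a conversion from a separate deflated node into a distinct inflated type, while the added lines inflate a single node type in place and return it. A stand-alone sketch of the two shapes, with stand-in types rather than the real libcst ones, may help when reading the long hunk.

```
// Stand-in types only; an illustration of the two Inflate shapes, not libcst code.
struct Config;
type Result<T> = std::result::Result<T, String>;

// Shape on the removed (`-`) lines: inflation consumes a deflated node and produces a
// different type that carries the whitespace fields.
trait InflateDeflated {
    type Inflated;
    fn inflate(self, config: &Config) -> Result<Self::Inflated>;
}

// Shape on the added (`+`) lines: one node type whose default-initialized whitespace
// fields are filled in place before the same value is returned.
trait InflateInPlace: Sized {
    fn inflate(self, config: &Config) -> Result<Self>;
}

struct Name {
    value: String,
    whitespace_after: String,
}

impl InflateInPlace for Name {
    fn inflate(mut self, _config: &Config) -> Result<Self> {
        // The real crate reads this out of the surrounding tokens; a literal stands in here.
        self.whitespace_after = " ".to_string();
        Ok(self)
    }
}
```
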
-use std::mem::swap; +use std::{mem::swap, rc::Rc}; use crate::{ inflate_helpers::adjust_parameters_trailing_whitespace, nodes::{ - op::*, - statement::*, - traits::{Inflate, ParenthesizedDeflatedNode, ParenthesizedNode, Result, WithComma}, + traits::{Inflate, ParenthesizedNode, Result, WithComma}, whitespace::ParenthesizableWhitespace, Annotation, AssignEqual, AssignTargetExpression, BinaryOp, BooleanOp, Codegen, CodegenState, Colon, Comma, CompOp, Dot, UnaryOp, @@ -20,13 +18,12 @@ use crate::{ Token, }, }; -#[cfg(feature = "py")] -use libcst_derive::TryIntoPy; -use libcst_derive::{cst_node, Codegen, Inflate, ParenthesizedDeflatedNode, ParenthesizedNode}; +use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; +use pyo3::{types::PyModule, IntoPy}; -type TokenRef<'r, 'a> = &'r Token<'a>; +type TokenRef<'a> = Rc>; -#[cst_node(Default)] +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] pub struct Parameters<'a> { pub params: Vec>, pub star_arg: Option>, @@ -47,40 +44,22 @@ impl<'a> Parameters<'a> { } } -impl<'r, 'a> DeflatedParameters<'r, 'a> { - pub fn is_empty(&self) -> bool { - self.params.is_empty() - && self.star_arg.is_none() - && self.kwonly_params.is_empty() - && self.star_kwarg.is_none() - && self.posonly_params.is_empty() - && self.posonly_ind.is_none() +impl<'a> Inflate<'a> for Parameters<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.posonly_params = self.posonly_params.inflate(config)?; + self.posonly_ind = self.posonly_ind.inflate(config)?; + self.params = self.params.inflate(config)?; + self.star_arg = self.star_arg.inflate(config)?; + self.kwonly_params = self.kwonly_params.inflate(config)?; + self.star_kwarg = self.star_kwarg.inflate(config)?; + Ok(self) } } -impl<'r, 'a> Inflate<'a> for DeflatedParameters<'r, 'a> { - type Inflated = Parameters<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let posonly_params = self.posonly_params.inflate(config)?; - let posonly_ind = self.posonly_ind.inflate(config)?; - let params = self.params.inflate(config)?; - let star_arg = self.star_arg.inflate(config)?; - let kwonly_params = self.kwonly_params.inflate(config)?; - let star_kwarg = self.star_kwarg.inflate(config)?; - Ok(Self::Inflated { - params, - star_arg, - kwonly_params, - star_kwarg, - posonly_params, - posonly_ind, - }) - } -} - -#[cst_node(Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] pub enum StarArg<'a> { - Star(Box>), + Star(ParamStar<'a>), Param(Box>), } @@ -138,18 +117,14 @@ impl<'a> Codegen<'a> for Parameters<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ParamSlash<'a> { pub comma: Option>, - pub whitespace_after: ParenthesizableWhitespace<'a>, - - pub(crate) tok: TokenRef<'a>, } impl<'a> ParamSlash<'a> { fn codegen(&self, state: &mut CodegenState<'a>, default_comma: bool) { state.add_token("/"); - self.whitespace_after.codegen(state); match (&self.comma, default_comma) { (Some(comma), _) => comma.codegen(state), (None, true) => state.add_token(", "), @@ -158,20 +133,14 @@ impl<'a> ParamSlash<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedParamSlash<'r, 'a> { - type Inflated = ParamSlash<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after = - parse_parenthesizable_whitespace(config, &mut self.tok.whitespace_after.borrow_mut())?; - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { - comma, - whitespace_after, - }) +impl<'a> Inflate<'a> for ParamSlash<'a> { + fn inflate(mut self, config: 
&Config<'a>) -> Result { + self.comma = self.comma.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ParamStar<'a> { pub comma: Comma<'a>, } @@ -183,31 +152,25 @@ impl<'a> Codegen<'a> for ParamStar<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedParamStar<'r, 'a> { - type Inflated = ParamStar<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { comma }) +impl<'a> Inflate<'a> for ParamStar<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.comma = self.comma.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode, Default)] +#[derive(Debug, Eq, PartialEq, Default, Clone, ParenthesizedNode, IntoPy)] pub struct Name<'a> { pub value: &'a str, pub lpar: Vec>, pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedName<'r, 'a> { - type Inflated = Name<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value: self.value, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Name<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -219,7 +182,7 @@ impl<'a> Codegen<'a> for Name<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Param<'a> { pub name: Name<'a>, pub annotation: Option>, @@ -236,34 +199,25 @@ pub struct Param<'a> { pub(crate) star_tok: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedParam<'r, 'a> { - type Inflated = Param<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let name = self.name.inflate(config)?; - let annotation = self.annotation.inflate(config)?; - let equal = self.equal.inflate(config)?; - let default = self.default.inflate(config)?; - let comma = self.comma.inflate(config)?; - let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { - parse_parenthesizable_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())? - } else { - Default::default() - }; - let whitespace_after_param = Default::default(); // TODO - Ok(Self::Inflated { - name, - annotation, - equal, - default, - comma, - star: self.star, - whitespace_after_star, - whitespace_after_param, - }) +impl<'a> Inflate<'a> for Param<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + // TODO: whitespace_after_param missing? 
+ self.name = self.name.inflate(config)?; + self.annotation = self.annotation.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.default = self.default.inflate(config)?; + self.comma = self.comma.inflate(config)?; + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_after_star = parse_parenthesizable_whitespace( + config, + &mut star_tok.whitespace_after.borrow_mut(), + )?; + } + Ok(self) } } -impl<'r, 'a> Default for DeflatedParam<'r, 'a> { +impl<'a> Default for Param<'a> { fn default() -> Self { Self { name: Default::default(), @@ -272,6 +226,8 @@ impl<'r, 'a> Default for DeflatedParam<'r, 'a> { default: None, comma: None, star: Some(""), // Note: this preserves a quirk of the pure python parser + whitespace_after_param: Default::default(), + whitespace_after_star: Default::default(), star_tok: None, } } @@ -318,7 +274,7 @@ impl<'a> Param<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Arg<'a> { pub value: Expression<'a>, pub keyword: Option>, @@ -331,29 +287,20 @@ pub struct Arg<'a> { pub(crate) star_tok: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedArg<'r, 'a> { - type Inflated = Arg<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { - parse_parenthesizable_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())? - } else { - Default::default() - }; - let keyword = self.keyword.inflate(config)?; - let equal = self.equal.inflate(config)?; - let value = self.value.inflate(config)?; - let comma = self.comma.inflate(config)?; +impl<'a> Inflate<'a> for Arg<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_after_star = parse_parenthesizable_whitespace( + config, + &mut star_tok.whitespace_after.borrow_mut(), + )?; + } + self.keyword = self.keyword.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.value = self.value.inflate(config)?; + self.comma = self.comma.inflate(config)?; // whitespace_after_arg is handled in Call - let whitespace_after_arg = Default::default(); - Ok(Self::Inflated { - value, - keyword, - equal, - comma, - star: self.star, - whitespace_after_star, - whitespace_after_arg, - }) + Ok(self) } } @@ -381,8 +328,8 @@ impl<'a> Arg<'a> { } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedArg<'r, 'a> { - fn with_comma(self, c: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for Arg<'a> { + fn with_comma(self, c: Comma<'a>) -> Self { Self { comma: Some(c), ..self @@ -390,8 +337,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedArg<'r, 'a> { } } -#[cst_node] -#[derive(Default)] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct LeftParen<'a> { /// Any space that appears directly after this left parenthesis. 
pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -406,19 +352,17 @@ impl<'a> Codegen<'a> for LeftParen<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedLeftParen<'r, 'a> { - type Inflated = LeftParen<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for LeftParen<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.lpar_tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_after }) + Ok(self) } } -#[cst_node] -#[derive(Default)] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct RightParen<'a> { /// Any space that appears directly before this right parenthesis. pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -433,52 +377,51 @@ impl<'a> Codegen<'a> for RightParen<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedRightParen<'r, 'a> { - type Inflated = RightParen<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for RightParen<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.rpar_tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_before }) + Ok(self) } } -#[cst_node(ParenthesizedNode, Codegen, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Eq, PartialEq, Clone, ParenthesizedNode, Codegen, Inflate, IntoPy)] pub enum Expression<'a> { - Name(Box>), - Ellipsis(Box>), - Integer(Box>), - Float(Box>), - Imaginary(Box>), - Comparison(Box>), - UnaryOperation(Box>), - BinaryOperation(Box>), - BooleanOperation(Box>), - Attribute(Box>), - Tuple(Box>), - Call(Box>), - GeneratorExp(Box>), - ListComp(Box>), - SetComp(Box>), - DictComp(Box>), - List(Box>), - Set(Box>), - Dict(Box>), - Subscript(Box>), - StarredElement(Box>), - IfExp(Box>), - Lambda(Box>), - Yield(Box>), - Await(Box>), - SimpleString(Box>), - ConcatenatedString(Box>), - FormattedString(Box>), - TemplatedString(Box>), - NamedExpr(Box>), + Name(Name<'a>), + Ellipsis(Ellipsis<'a>), + Integer(Integer<'a>), + Float(Float<'a>), + Imaginary(Imaginary<'a>), + Comparison(Comparison<'a>), + UnaryOperation(UnaryOperation<'a>), + BinaryOperation(BinaryOperation<'a>), + BooleanOperation(BooleanOperation<'a>), + Attribute(Attribute<'a>), + Tuple(Tuple<'a>), + Call(Call<'a>), + GeneratorExp(GeneratorExp<'a>), + ListComp(ListComp<'a>), + SetComp(SetComp<'a>), + DictComp(DictComp<'a>), + List(List<'a>), + Set(Set<'a>), + Dict(Dict<'a>), + Subscript(Subscript<'a>), + StarredElement(StarredElement<'a>), + IfExp(IfExp<'a>), + Lambda(Lambda<'a>), + Yield(Yield<'a>), + Await(Await<'a>), + SimpleString(SimpleString<'a>), + ConcatenatedString(ConcatenatedString<'a>), + FormattedString(FormattedString<'a>), + NamedExpr(NamedExpr<'a>), } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Ellipsis<'a> { pub lpar: Vec>, pub rpar: Vec>, @@ -491,16 +434,15 @@ impl<'a> Codegen<'a> for Ellipsis<'a> { }) } } -impl<'r, 'a> Inflate<'a> for DeflatedEllipsis<'r, 'a> { - type Inflated = Ellipsis<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { lpar, rpar }) +impl<'a> Inflate<'a> for Ellipsis<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = 
self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Integer<'a> { /// A string representation of the integer, such as ``"100000"`` or /// ``"100_000"``. @@ -517,20 +459,15 @@ impl<'a> Codegen<'a> for Integer<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedInteger<'r, 'a> { - type Inflated = Integer<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value: self.value, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Integer<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Float<'a> { /// A string representation of the floating point number, such as ```"0.05"``, /// ``".050"``, or ``"5e-2"``. @@ -547,20 +484,15 @@ impl<'a> Codegen<'a> for Float<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedFloat<'r, 'a> { - type Inflated = Float<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value: self.value, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Float<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Imaginary<'a> { /// A string representation of the complex number, such as ``"2j"`` pub value: &'a str, @@ -576,20 +508,15 @@ impl<'a> Codegen<'a> for Imaginary<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedImaginary<'r, 'a> { - type Inflated = Imaginary<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value: self.value, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Imaginary<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Comparison<'a> { pub left: Box>, pub comparisons: Vec>, @@ -607,23 +534,17 @@ impl<'a> Codegen<'a> for Comparison<'a> { }) } } -impl<'r, 'a> Inflate<'a> for DeflatedComparison<'r, 'a> { - type Inflated = Comparison<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let left = self.left.inflate(config)?; - let comparisons = self.comparisons.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - left, - comparisons, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Comparison<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.comparisons = self.comparisons.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct UnaryOperation<'a> { pub operator: UnaryOp<'a>, pub expression: Box>, @@ -640,23 +561,17 @@ impl<'a> Codegen<'a> for UnaryOperation<'a> { } } 
-impl<'r, 'a> Inflate<'a> for DeflatedUnaryOperation<'r, 'a> { - type Inflated = UnaryOperation<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let operator = self.operator.inflate(config)?; - let expression = self.expression.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - operator, - expression, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for UnaryOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.expression = self.expression.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct BinaryOperation<'a> { pub left: Box>, pub operator: BinaryOp<'a>, @@ -675,25 +590,18 @@ impl<'a> Codegen<'a> for BinaryOperation<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedBinaryOperation<'r, 'a> { - type Inflated = BinaryOperation<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let left = self.left.inflate(config)?; - let operator = self.operator.inflate(config)?; - let right = self.right.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - left, - operator, - right, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for BinaryOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct BooleanOperation<'a> { pub left: Box>, pub operator: BooleanOp<'a>, @@ -712,25 +620,18 @@ impl<'a> Codegen<'a> for BooleanOperation<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedBooleanOperation<'r, 'a> { - type Inflated = BooleanOperation<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let left = self.left.inflate(config)?; - let operator = self.operator.inflate(config)?; - let right = self.right.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - left, - operator, - right, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for BooleanOperation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Call<'a> { pub func: Box>, pub args: Vec>, @@ -743,22 +644,21 @@ pub struct Call<'a> { pub(crate) rpar_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedCall<'r, 'a> { - type Inflated = Call<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let func = self.func.inflate(config)?; - let whitespace_after_func = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Call<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.func = self.func.inflate(config)?; + self.whitespace_after_func = parse_parenthesizable_whitespace( config, &mut 
(*self.lpar_tok).whitespace_before.borrow_mut(), )?; - let whitespace_before_args = parse_parenthesizable_whitespace( + self.whitespace_before_args = parse_parenthesizable_whitespace( config, &mut (*self.lpar_tok).whitespace_after.borrow_mut(), )?; - let mut args = self.args.inflate(config)?; + self.args = self.args.inflate(config)?; - if let Some(arg) = args.last_mut() { + if let Some(arg) = self.args.last_mut() { if arg.comma.is_none() { arg.whitespace_after_arg = parse_parenthesizable_whitespace( config, @@ -766,16 +666,9 @@ impl<'r, 'a> Inflate<'a> for DeflatedCall<'r, 'a> { )?; } } - let rpar = self.rpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - func, - args, - lpar, - rpar, - whitespace_after_func, - whitespace_before_args, - }) + Ok(self) } } @@ -795,7 +688,7 @@ impl<'a> Codegen<'a> for Call<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Attribute<'a> { pub value: Box>, pub attr: Name<'a>, @@ -804,21 +697,14 @@ pub struct Attribute<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedAttribute<'r, 'a> { - type Inflated = Attribute<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let value = self.value.inflate(config)?; - let dot = self.dot.inflate(config)?; - let attr = self.attr.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value, - attr, - dot, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for Attribute<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.value = self.value.inflate(config)?; + self.dot = self.dot.inflate(config)?; + self.attr = self.attr.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -832,22 +718,23 @@ impl<'a> Codegen<'a> for Attribute<'a> { } } -#[cst_node(Codegen, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] pub enum NameOrAttribute<'a> { - N(Box>), - A(Box>), + N(Name<'a>), + A(Attribute<'a>), } -impl<'r, 'a> std::convert::From> for DeflatedExpression<'r, 'a> { - fn from(x: DeflatedNameOrAttribute<'r, 'a>) -> Self { +impl<'a> std::convert::From> for Expression<'a> { + fn from(x: NameOrAttribute<'a>) -> Self { match x { - DeflatedNameOrAttribute::N(n) => Self::Name(n), - DeflatedNameOrAttribute::A(a) => Self::Attribute(a), + NameOrAttribute::N(n) => Self::Name(n), + NameOrAttribute::A(a) => Self::Attribute(a), } } } -#[cst_node] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct ComparisonTarget<'a> { pub operator: CompOp<'a>, pub comparator: Expression<'a>, @@ -860,19 +747,15 @@ impl<'a> Codegen<'a> for ComparisonTarget<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedComparisonTarget<'r, 'a> { - type Inflated = ComparisonTarget<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let operator = self.operator.inflate(config)?; - let comparator = self.comparator.inflate(config)?; - Ok(Self::Inflated { - operator, - comparator, - }) +impl<'a> Inflate<'a> for ComparisonTarget<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.operator = self.operator.inflate(config)?; + self.comparator = self.comparator.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct StarredElement<'a> { pub value: Box>, pub comma: Option>, @@ -883,33 +766,25 @@ pub struct StarredElement<'a> { pub(crate) 
star_tok: TokenRef<'a>, } -impl<'r, 'a> DeflatedStarredElement<'r, 'a> { - pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result> { - let lpar = self.lpar.inflate(config)?; - let whitespace_before_value = parse_parenthesizable_whitespace( +impl<'a> StarredElement<'a> { + pub fn inflate_element(mut self, config: &Config<'a>, is_last: bool) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.whitespace_before_value = parse_parenthesizable_whitespace( config, &mut (*self.star_tok).whitespace_after.borrow_mut(), )?; - let value = self.value.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - let comma = if is_last { + self.value = self.value.inflate(config)?; + self.comma = if is_last { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(StarredElement { - value, - comma, - lpar, - rpar, - whitespace_before_value, - }) + Ok(self) } } -impl<'r, 'a> Inflate<'a> for DeflatedStarredElement<'r, 'a> { - type Inflated = StarredElement<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for StarredElement<'a> { + fn inflate(self, config: &Config<'a>) -> Result { self.inflate_element(config, false) } } @@ -928,17 +803,44 @@ impl<'a> Codegen<'a> for StarredElement<'a> { } #[allow(clippy::large_enum_variant)] -#[cst_node(NoIntoPy)] +#[derive(Debug, PartialEq, Eq, Clone)] pub enum Element<'a> { Simple { value: Expression<'a>, comma: Option>, }, - Starred(Box>), + Starred(StarredElement<'a>), +} + +// TODO: this could be a derive helper attribute to override the python class name +impl<'a> IntoPy for Element<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { value, comma } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("value", value.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("Element") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } } impl<'a> Element<'a> { - pub fn codegen( + fn codegen( &self, state: &mut CodegenState<'a>, default_comma: bool, @@ -961,12 +863,11 @@ impl<'a> Element<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } -} -impl<'r, 'a> DeflatedElement<'r, 'a> { - pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result> { + + pub fn inflate_element(self, config: &Config<'a>, is_last: bool) -> Result { Ok(match self { - Self::Starred(s) => Element::Starred(Box::new(s.inflate_element(config, is_last)?)), - Self::Simple { value, comma } => Element::Simple { + Self::Starred(s) => Self::Starred(s.inflate_element(config, is_last)?), + Self::Simple { value, comma } => Self::Simple { value: value.inflate(config)?, comma: if is_last { comma.map(|c| c.inflate_before(config)).transpose()? @@ -978,51 +879,46 @@ impl<'r, 'a> DeflatedElement<'r, 'a> { } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for Element<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { let comma = Some(comma); match self { Self::Simple { value, .. 
} => Self::Simple { comma, value }, - Self::Starred(mut s) => { - s.comma = comma; - Self::Starred(s) - } + Self::Starred(s) => Self::Starred(StarredElement { comma, ..s }), } } } -impl<'r, 'a> std::convert::From> for DeflatedElement<'r, 'a> { - fn from(e: DeflatedExpression<'r, 'a>) -> Self { +impl<'a> std::convert::From> for Element<'a> { + fn from(e: Expression<'a>) -> Self { match e { - DeflatedExpression::StarredElement(e) => Self::Starred(e), - value => Self::Simple { value, comma: None }, + Expression::StarredElement(e) => Element::Starred(e), + value => Element::Simple { value, comma: None }, } } } -#[cst_node(ParenthesizedNode, Default)] +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] pub struct Tuple<'a> { pub elements: Vec>, pub lpar: Vec>, pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedTuple<'r, 'a> { - type Inflated = Tuple<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; +impl<'a> Inflate<'a> for Tuple<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result> { + self.lpar = self.lpar.inflate(config)?; let len = self.elements.len(); - let elements = self + self.elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elements, - lpar, - rpar, - }) + if !self.elements.is_empty() { + // rpar only has whitespace if elements is non empty + self.rpar = self.rpar.inflate(config)?; + } + Ok(self) } } @@ -1041,7 +937,7 @@ impl<'a> Codegen<'a> for Tuple<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct GeneratorExp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1058,23 +954,17 @@ impl<'a> Codegen<'a> for GeneratorExp<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedGeneratorExp<'r, 'a> { - type Inflated = GeneratorExp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let elt = self.elt.inflate(config)?; - let for_in = self.for_in.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elt, - for_in, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for GeneratorExp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct ListComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1095,28 +985,19 @@ impl<'a> Codegen<'a> for ListComp<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedListComp<'r, 'a> { - type Inflated = ListComp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbracket = self.lbracket.inflate(config)?; - let elt = self.elt.inflate(config)?; - let for_in = self.for_in.inflate(config)?; - let rbracket = self.rbracket.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elt, - for_in, - lbracket, - rbracket, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for ListComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbracket = self.rbracket.inflate(config)?; + 
self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node] -#[derive(Default)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct LeftSquareBracket<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1129,19 +1010,17 @@ impl<'a> Codegen<'a> for LeftSquareBracket<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedLeftSquareBracket<'r, 'a> { - type Inflated = LeftSquareBracket<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for LeftSquareBracket<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_after }) + Ok(self) } } -#[cst_node] -#[derive(Default)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct RightSquareBracket<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, @@ -1154,18 +1033,17 @@ impl<'a> Codegen<'a> for RightSquareBracket<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedRightSquareBracket<'r, 'a> { - type Inflated = RightSquareBracket<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for RightSquareBracket<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_before }) + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct SetComp<'a> { pub elt: Box>, pub for_in: Box>, @@ -1175,23 +1053,15 @@ pub struct SetComp<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedSetComp<'r, 'a> { - type Inflated = SetComp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbrace = self.lbrace.inflate(config)?; - let elt = self.elt.inflate(config)?; - let for_in = self.for_in.inflate(config)?; - let rbrace = self.rbrace.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elt, - for_in, - lbrace, - rbrace, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for SetComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + self.elt = self.elt.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1206,7 +1076,7 @@ impl<'a> Codegen<'a> for SetComp<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct DictComp<'a> { pub key: Box>, pub value: Box>, @@ -1221,35 +1091,24 @@ pub struct DictComp<'a> { pub(crate) colon_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedDictComp<'r, 'a> { - type Inflated = DictComp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbrace = self.lbrace.inflate(config)?; - let key = self.key.inflate(config)?; - let whitespace_before_colon = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for DictComp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; + self.key = 
self.key.inflate(config)?; + self.whitespace_before_colon = parse_parenthesizable_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_colon = parse_parenthesizable_whitespace( + self.whitespace_after_colon = parse_parenthesizable_whitespace( config, &mut (*self.colon_tok).whitespace_after.borrow_mut(), )?; - let value = self.value.inflate(config)?; - let for_in = self.for_in.inflate(config)?; - let rbrace = self.rbrace.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - key, - value, - for_in, - lbrace, - rbrace, - lpar, - rpar, - whitespace_before_colon, - whitespace_after_colon, - }) + self.value = self.value.inflate(config)?; + self.for_in = self.for_in.inflate(config)?; + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1268,28 +1127,19 @@ impl<'a> Codegen<'a> for DictComp<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct LeftCurlyBrace<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, } -impl<'a> Default for LeftCurlyBrace<'a> { - fn default() -> Self { - Self { - whitespace_after: Default::default(), - } - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedLeftCurlyBrace<'r, 'a> { - type Inflated = LeftCurlyBrace<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for LeftCurlyBrace<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_after }) + Ok(self) } } @@ -1300,28 +1150,19 @@ impl<'a> Codegen<'a> for LeftCurlyBrace<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct RightCurlyBrace<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub(crate) tok: TokenRef<'a>, } -impl<'a> Default for RightCurlyBrace<'a> { - fn default() -> Self { - Self { - whitespace_before: Default::default(), - } - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedRightCurlyBrace<'r, 'a> { - type Inflated = RightCurlyBrace<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for RightCurlyBrace<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { whitespace_before }) + Ok(self) } } @@ -1332,7 +1173,13 @@ impl<'a> Codegen<'a> for RightCurlyBrace<'a> { } } -#[cst_node] +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct CompFor<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1371,59 +1218,43 @@ impl<'a> Codegen<'a> for CompFor<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedCompFor<'r, 'a> { - type Inflated = CompFor<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let mut whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for CompFor<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.for_tok).whitespace_before.borrow_mut(), )?; - let asynchronous = if let Some(asy_tok) = self.async_tok.as_mut() { + if let 
(Some(asy_tok), Some(asy)) = (self.async_tok.as_mut(), self.asynchronous.as_mut()) { // If there is an async keyword, the start of the CompFor expression is // considered to be this keyword, so whitespace_before needs to adjust but // Asynchronous will own the whitespace before the for token. - let mut asy_whitespace_after = parse_parenthesizable_whitespace( + asy.whitespace_after = parse_parenthesizable_whitespace( config, &mut asy_tok.whitespace_before.borrow_mut(), )?; - swap(&mut asy_whitespace_after, &mut whitespace_before); - Some(Asynchronous { - whitespace_after: asy_whitespace_after, - }) - } else { - None - }; - let whitespace_after_for = parse_parenthesizable_whitespace( + swap(&mut asy.whitespace_after, &mut self.whitespace_before); + } + self.whitespace_after_for = parse_parenthesizable_whitespace( config, &mut (*self.for_tok).whitespace_after.borrow_mut(), )?; - let target = self.target.inflate(config)?; - let whitespace_before_in = parse_parenthesizable_whitespace( + self.target = self.target.inflate(config)?; + self.whitespace_before_in = parse_parenthesizable_whitespace( config, &mut (*self.in_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_in = parse_parenthesizable_whitespace( + self.whitespace_after_in = parse_parenthesizable_whitespace( config, &mut (*self.in_tok).whitespace_after.borrow_mut(), )?; - let iter = self.iter.inflate(config)?; - let ifs = self.ifs.inflate(config)?; - let inner_for_in = self.inner_for_in.inflate(config)?; - Ok(Self::Inflated { - target, - iter, - ifs, - inner_for_in, - asynchronous, - whitespace_before, - whitespace_after_for, - whitespace_before_in, - whitespace_after_in, - }) + self.iter = self.iter.inflate(config)?; + self.ifs = self.ifs.inflate(config)?; + self.inner_for_in = self.inner_for_in.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Asynchronous<'a> { pub whitespace_after: ParenthesizableWhitespace<'a>, } @@ -1435,13 +1266,7 @@ impl<'a> Codegen<'a> for Asynchronous<'a> { } } -pub(crate) fn make_async<'r, 'a>() -> DeflatedAsynchronous<'r, 'a> { - DeflatedAsynchronous { - _phantom: Default::default(), - } -} - -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct CompIf<'a> { pub test: Expression<'a>, pub whitespace_before: ParenthesizableWhitespace<'a>, @@ -1459,27 +1284,22 @@ impl<'a> Codegen<'a> for CompIf<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedCompIf<'r, 'a> { - type Inflated = CompIf<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for CompIf<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), )?; - let whitespace_before_test = parse_parenthesizable_whitespace( + self.whitespace_before_test = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_after.borrow_mut(), )?; - let test = self.test.inflate(config)?; - Ok(Self::Inflated { - test, - whitespace_before, - whitespace_before_test, - }) + self.test = self.test.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct List<'a> { pub elements: Vec>, pub lbracket: LeftSquareBracket<'a>, @@ -1488,32 +1308,23 @@ pub struct List<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedList<'r, 'a> { - type Inflated = List<'a>; - fn inflate(self, config: 
&Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbracket = self.lbracket.inflate(config)?; +impl<'a> Inflate<'a> for List<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; let len = self.elements.len(); - let elements = self + self.elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>>()?; - let rbracket = if !elements.is_empty() { + .collect::>()?; + if !self.elements.is_empty() { // lbracket owns all the whitespace if there are no elements - self.rbracket.inflate(config)? - } else { - Default::default() - }; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elements, - lbracket, - rbracket, - lpar, - rpar, - }) + self.rbracket = self.rbracket.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1530,7 +1341,7 @@ impl<'a> Codegen<'a> for List<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Set<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1539,31 +1350,22 @@ pub struct Set<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedSet<'r, 'a> { - type Inflated = Set<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbrace = self.lbrace.inflate(config)?; +impl<'a> Inflate<'a> for Set<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); - let elements = self + self.elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>>()?; - let rbrace = if !elements.is_empty() { - self.rbrace.inflate(config)? - } else { - Default::default() - }; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elements, - lbrace, - rbrace, - lpar, - rpar, - }) + .collect::>()?; + if !self.elements.is_empty() { + self.rbrace = self.rbrace.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1580,7 +1382,7 @@ impl<'a> Codegen<'a> for Set<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Dict<'a> { pub elements: Vec>, pub lbrace: LeftCurlyBrace<'a>, @@ -1589,31 +1391,22 @@ pub struct Dict<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedDict<'r, 'a> { - type Inflated = Dict<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbrace = self.lbrace.inflate(config)?; +impl<'a> Inflate<'a> for Dict<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); - let elements = self + self.elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) - .collect::>>()?; - let rbrace = if !elements.is_empty() { - self.rbrace.inflate(config)? 
- } else { - Default::default() - }; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elements, - lbrace, - rbrace, - lpar, - rpar, - }) + .collect::>()?; + if !self.elements.is_empty() { + self.rbrace = self.rbrace.inflate(config)?; + } + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1630,7 +1423,8 @@ impl<'a> Codegen<'a> for Dict<'a> { } } -#[cst_node(NoIntoPy)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone)] pub enum DictElement<'a> { Simple { key: Expression<'a>, @@ -1643,14 +1437,50 @@ pub enum DictElement<'a> { Starred(StarredDictElement<'a>), } -impl<'r, 'a> DeflatedDictElement<'r, 'a> { - pub fn inflate_element( - self, - config: &Config<'a>, - last_element: bool, - ) -> Result> { +// TODO: this could be a derive helper attribute to override the python class name +impl<'a> IntoPy for DictElement<'a> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + match self { + Self::Starred(s) => s.into_py(py), + Self::Simple { + key, + value, + comma, + whitespace_after_colon, + whitespace_before_colon, + .. + } => { + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + let kwargs = [ + Some(("key", key.into_py(py))), + Some(("value", value.into_py(py))), + Some(( + "whitespace_before_colon", + whitespace_before_colon.into_py(py), + )), + Some(("whitespace_after_colon", whitespace_after_colon.into_py(py))), + comma.map(|x| ("comma", x.into_py(py))), + ] + .iter() + .filter(|x| x.is_some()) + .map(|x| x.as_ref().unwrap()) + .collect::>() + .into_py_dict(py); + libcst + .getattr("DictElement") + .expect("no Element found in libcst") + .call((), Some(kwargs)) + .expect("conversion failed") + .into() + } + } + } +} + +impl<'a> DictElement<'a> { + pub fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { Ok(match self { - Self::Starred(s) => DictElement::Starred(s.inflate_element(config, last_element)?), + Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), Self::Simple { key, value, @@ -1666,7 +1496,7 @@ impl<'r, 'a> DeflatedDictElement<'r, 'a> { config, &mut colon_tok.whitespace_after.borrow_mut(), )?; - DictElement::Simple { + Self::Simple { key: key.inflate(config)?, whitespace_before_colon, whitespace_after_colon, @@ -1676,6 +1506,7 @@ impl<'r, 'a> DeflatedDictElement<'r, 'a> { } else { comma.inflate(config) }?, + colon_tok, } } }) @@ -1719,27 +1550,31 @@ impl<'a> DictElement<'a> { } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedDictElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for DictElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { let comma = Some(comma); match self { - Self::Starred(s) => Self::Starred(DeflatedStarredDictElement { comma, ..s }), + Self::Starred(s) => Self::Starred(StarredDictElement { comma, ..s }), Self::Simple { key, value, + whitespace_before_colon, + whitespace_after_colon, colon_tok, .. 
} => Self::Simple { comma, key, value, + whitespace_after_colon, + whitespace_before_colon, colon_tok, }, } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct StarredDictElement<'a> { pub value: Expression<'a>, pub comma: Option>, @@ -1748,27 +1583,19 @@ pub struct StarredDictElement<'a> { pub(crate) star_tok: TokenRef<'a>, } -impl<'r, 'a> DeflatedStarredDictElement<'r, 'a> { - fn inflate_element( - self, - config: &Config<'a>, - last_element: bool, - ) -> Result> { - let whitespace_before_value = parse_parenthesizable_whitespace( +impl<'a> StarredDictElement<'a> { + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.whitespace_before_value = parse_parenthesizable_whitespace( config, &mut (*self.star_tok).whitespace_after.borrow_mut(), )?; - let value = self.value.inflate(config)?; - let comma = if last_element { + self.value = self.value.inflate(config)?; + self.comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(StarredDictElement { - value, - comma, - whitespace_before_value, - }) + Ok(self) } } @@ -1783,80 +1610,50 @@ impl<'a> Codegen<'a> for StarredDictElement<'a> { } } -#[cst_node(Codegen, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] pub enum BaseSlice<'a> { - Index(Box>), - Slice(Box>), + Index(Index<'a>), + Slice(Slice<'a>), } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Index<'a> { pub value: Expression<'a>, - pub star: Option<&'a str>, - pub whitespace_after_star: Option>, - - pub(crate) star_tok: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedIndex<'r, 'a> { - type Inflated = Index<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let (star, whitespace_after_star) = if let Some(star_tok) = self.star_tok.as_mut() { - ( - Some(star_tok.string), - Some(parse_parenthesizable_whitespace( - config, - &mut star_tok.whitespace_after.borrow_mut(), - )?), - ) - } else { - (None, None) - }; - let value = self.value.inflate(config)?; - Ok(Self::Inflated { - value, - star, - whitespace_after_star, - }) +impl<'a> Inflate<'a> for Index<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + Ok(self) } } impl<'a> Codegen<'a> for Index<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { - if let Some(star) = self.star { - state.add_token(star); - } - self.whitespace_after_star.codegen(state); self.value.codegen(state); } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Slice<'a> { - #[cfg_attr(feature = "py", no_py_default)] + #[no_py_default] pub lower: Option>, - #[cfg_attr(feature = "py", no_py_default)] + #[no_py_default] pub upper: Option>, pub step: Option>, pub first_colon: Colon<'a>, pub second_colon: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedSlice<'r, 'a> { - type Inflated = Slice<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lower = self.lower.inflate(config)?; - let first_colon = self.first_colon.inflate(config)?; - let upper = self.upper.inflate(config)?; - let second_colon = self.second_colon.inflate(config)?; - let step = self.step.inflate(config)?; - Ok(Self::Inflated { - lower, - upper, - step, - first_colon, - second_colon, - }) +impl<'a> Inflate<'a> for Slice<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lower = self.lower.inflate(config)?; + self.first_colon = self.first_colon.inflate(config)?; + 
self.upper = self.upper.inflate(config)?; + self.second_colon = self.second_colon.inflate(config)?; + self.step = self.step.inflate(config)?; + Ok(self) } } @@ -1880,18 +1677,17 @@ impl<'a> Codegen<'a> for Slice<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct SubscriptElement<'a> { pub slice: BaseSlice<'a>, pub comma: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedSubscriptElement<'r, 'a> { - type Inflated = SubscriptElement<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let slice = self.slice.inflate(config)?; - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { slice, comma }) +impl<'a> Inflate<'a> for SubscriptElement<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.slice = self.slice.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) } } @@ -1904,7 +1700,7 @@ impl<'a> Codegen<'a> for SubscriptElement<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Subscript<'a> { pub value: Box>, pub slice: Vec>, @@ -1913,30 +1709,23 @@ pub struct Subscript<'a> { pub lpar: Vec>, pub rpar: Vec>, pub whitespace_after_value: ParenthesizableWhitespace<'a>, + + pub(crate) lbracket_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedSubscript<'r, 'a> { - type Inflated = Subscript<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let value = self.value.inflate(config)?; - let whitespace_after_value = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Subscript<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.value = self.value.inflate(config)?; + self.whitespace_after_value = parse_parenthesizable_whitespace( config, - &mut self.lbracket.tok.whitespace_before.borrow_mut(), + &mut (*self.lbracket_tok).whitespace_before.borrow_mut(), )?; - let lbracket = self.lbracket.inflate(config)?; - let slice = self.slice.inflate(config)?; - let rbracket = self.rbracket.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value, - slice, - lbracket, - rbracket, - lpar, - rpar, - whitespace_after_value, - }) + self.lbracket = self.lbracket.inflate(config)?; + self.slice = self.slice.inflate(config)?; + self.rbracket = self.rbracket.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -1958,7 +1747,7 @@ impl<'a> Codegen<'a> for Subscript<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct IfExp<'a> { pub test: Box>, pub body: Box>, @@ -1974,41 +1763,30 @@ pub struct IfExp<'a> { pub(crate) else_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedIfExp<'r, 'a> { - type Inflated = IfExp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let body = self.body.inflate(config)?; - let whitespace_before_if = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for IfExp<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.body = self.body.inflate(config)?; + self.whitespace_before_if = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_if = parse_parenthesizable_whitespace( + self.whitespace_after_if = parse_parenthesizable_whitespace( config, &mut (*self.if_tok).whitespace_after.borrow_mut(), )?; - let test = 
self.test.inflate(config)?; - let whitespace_before_else = parse_parenthesizable_whitespace( + self.test = self.test.inflate(config)?; + self.whitespace_before_else = parse_parenthesizable_whitespace( config, &mut (*self.else_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_else = parse_parenthesizable_whitespace( + self.whitespace_after_else = parse_parenthesizable_whitespace( config, &mut (*self.else_tok).whitespace_after.borrow_mut(), )?; - let orelse = self.orelse.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - test, - body, - orelse, - lpar, - rpar, - whitespace_before_if, - whitespace_after_if, - whitespace_before_else, - whitespace_after_else, - }) + self.orelse = self.orelse.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2028,7 +1806,7 @@ impl<'a> Codegen<'a> for IfExp<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Lambda<'a> { pub params: Box>, pub body: Box>, @@ -2040,31 +1818,21 @@ pub struct Lambda<'a> { pub(crate) lambda_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedLambda<'r, 'a> { - type Inflated = Lambda<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let whitespace_after_lambda = if !self.params.is_empty() { - Some(parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Lambda<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + if !self.params.is_empty() { + self.whitespace_after_lambda = Some(parse_parenthesizable_whitespace( config, &mut (*self.lambda_tok).whitespace_after.borrow_mut(), - )?) - } else { - Default::default() - }; - let mut params = self.params.inflate(config)?; - adjust_parameters_trailing_whitespace(config, &mut params, &self.colon.tok)?; - let colon = self.colon.inflate(config)?; - let body = self.body.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - params, - body, - colon, - lpar, - rpar, - whitespace_after_lambda, - }) + )?); + } + self.params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut self.params, &self.colon.tok)?; + self.colon = self.colon.inflate(config)?; + self.body = self.body.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2085,7 +1853,7 @@ impl<'a> Codegen<'a> for Lambda<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct From<'a> { pub item: Expression<'a>, pub whitespace_before_from: Option>, @@ -2107,41 +1875,36 @@ impl<'a> From<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedFrom<'r, 'a> { - type Inflated = From<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before_from = Some(parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for From<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_from = Some(parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?); - let whitespace_after_from = parse_parenthesizable_whitespace( + self.whitespace_after_from = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - let item = self.item.inflate(config)?; - Ok(Self::Inflated { - item, - whitespace_before_from, - whitespace_after_from, - }) + self.item = self.item.inflate(config)?; + Ok(self) } } -#[cst_node] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum 
YieldValue<'a> { - Expression(Box>), - From(Box>), + Expression(Expression<'a>), + From(From<'a>), } -impl<'r, 'a> Inflate<'a> for DeflatedYieldValue<'r, 'a> { - type Inflated = YieldValue<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for YieldValue<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { - Self::Expression(e) => Self::Inflated::Expression(e.inflate(config)?), + Self::Expression(e) => Self::Expression(e.inflate(config)?), Self::From(e) => { let mut e = e.inflate(config)?; e.whitespace_before_from = None; - Self::Inflated::From(e) + Self::From(e) } }) } @@ -2156,7 +1919,13 @@ impl<'a> YieldValue<'a> { } } -#[cst_node(ParenthesizedNode)] +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Yield<'a> { pub value: Option>>, pub lpar: Vec>, @@ -2166,26 +1935,18 @@ pub struct Yield<'a> { pub(crate) yield_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedYield<'r, 'a> { - type Inflated = Yield<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let whitespace_after_yield = if self.value.is_some() { - Some(parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Yield<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + if self.value.is_some() { + self.whitespace_after_yield = Some(parse_parenthesizable_whitespace( config, &mut (*self.yield_tok).whitespace_after.borrow_mut(), - )?) - } else { - Default::default() - }; - let value = self.value.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value, - lpar, - rpar, - whitespace_after_yield, - }) + )?); + } + self.value = self.value.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2206,7 +1967,7 @@ impl<'a> Codegen<'a> for Yield<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct Await<'a> { pub expression: Box>, pub lpar: Vec>, @@ -2216,22 +1977,16 @@ pub struct Await<'a> { pub(crate) await_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedAwait<'r, 'a> { - type Inflated = Await<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let whitespace_after_await = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Await<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.whitespace_after_await = parse_parenthesizable_whitespace( config, &mut (*self.await_tok).whitespace_after.borrow_mut(), )?; - let expression = self.expression.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - expression, - lpar, - rpar, - whitespace_after_await, - }) + self.expression = self.expression.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2245,26 +2000,25 @@ impl<'a> Codegen<'a> for Await<'a> { } } -#[cst_node(Codegen, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] pub enum String<'a> { Simple(SimpleString<'a>), Concatenated(ConcatenatedString<'a>), Formatted(FormattedString<'a>), - Templated(TemplatedString<'a>), } -impl<'r, 'a> std::convert::From> for DeflatedExpression<'r, 'a> { - fn from(s: DeflatedString<'r, 'a>) -> Self { +impl<'a> std::convert::From> for 
Expression<'a> { + fn from(s: String<'a>) -> Self { match s { - DeflatedString::Simple(s) => Self::SimpleString(Box::new(s)), - DeflatedString::Concatenated(s) => Self::ConcatenatedString(Box::new(s)), - DeflatedString::Formatted(s) => Self::FormattedString(Box::new(s)), - DeflatedString::Templated(s) => Self::TemplatedString(Box::new(s)), + String::Simple(s) => Self::SimpleString(s), + String::Concatenated(s) => Self::ConcatenatedString(s), + String::Formatted(s) => Self::FormattedString(s), } } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct ConcatenatedString<'a> { pub left: Box>, pub right: Box>, @@ -2277,24 +2031,17 @@ pub struct ConcatenatedString<'a> { pub(crate) right_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedConcatenatedString<'r, 'a> { - type Inflated = ConcatenatedString<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let left = self.left.inflate(config)?; - let whitespace_between = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for ConcatenatedString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.left = self.left.inflate(config)?; + self.whitespace_between = parse_parenthesizable_whitespace( config, &mut (*self.right_tok).whitespace_before.borrow_mut(), )?; - let right = self.right.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - left, - right, - lpar, - rpar, - whitespace_between, - }) + self.right = self.right.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2308,7 +2055,7 @@ impl<'a> Codegen<'a> for ConcatenatedString<'a> { } } -#[cst_node(ParenthesizedNode, Default)] +#[derive(Debug, PartialEq, Eq, Clone, Default, ParenthesizedNode, IntoPy)] pub struct SimpleString<'a> { /// The texual representation of the string, including quotes, prefix /// characters, and any escape characters present in the original source code, @@ -2318,16 +2065,11 @@ pub struct SimpleString<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedSimpleString<'r, 'a> { - type Inflated = SimpleString<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - value: self.value, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for SimpleString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2337,151 +2079,14 @@ impl<'a> Codegen<'a> for SimpleString<'a> { } } -#[cst_node] -pub struct TemplatedStringText<'a> { - pub value: &'a str, -} - -impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringText<'r, 'a> { - type Inflated = TemplatedStringText<'a>; - fn inflate(self, _config: &Config<'a>) -> Result { - Ok(Self::Inflated { value: self.value }) - } -} - -impl<'a> Codegen<'a> for TemplatedStringText<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - state.add_token(self.value); - } -} - -pub(crate) fn make_tstringtext<'r, 'a>(value: &'a str) -> DeflatedTemplatedStringText<'r, 'a> { - DeflatedTemplatedStringText { - value, - _phantom: Default::default(), - } -} - -#[cst_node] -pub struct TemplatedStringExpression<'a> { - // This represents the part of a t-string that is insde the brackets '{' and '}'. 
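// Editor's note: a minimal, self-contained sketch (not part of the patch) of the
// whitespace-preserving codegen pattern used by the string nodes above, where each
// node writes its own tokens plus the whitespace it owns into a shared output buffer.
// `MiniState`, `MiniCodegen`, and `ConcatPair` are hypothetical stand-ins for libcst's
// CodegenState, Codegen, and ConcatenatedString; only the shape of the pattern is real.
struct MiniState {
    out: String,
}

impl MiniState {
    fn add_token(&mut self, tok: &str) {
        self.out.push_str(tok);
    }
}

trait MiniCodegen {
    fn codegen(&self, state: &mut MiniState);
}

/// Two adjacent string literals plus the whitespace between them,
/// mirroring how ConcatenatedString keeps `whitespace_between`.
struct ConcatPair {
    left: String,
    whitespace_between: String,
    right: String,
}

impl MiniCodegen for ConcatPair {
    fn codegen(&self, state: &mut MiniState) {
        // Emit exactly what was parsed: left literal, preserved whitespace, right literal.
        state.add_token(&self.left);
        state.add_token(&self.whitespace_between);
        state.add_token(&self.right);
    }
}

fn main() {
    let node = ConcatPair {
        left: "'a'".to_string(),
        whitespace_between: "  ".to_string(),
        right: "'b'".to_string(),
    };
    let mut state = MiniState { out: String::new() };
    node.codegen(&mut state);
    // Round-trips the original spacing between the two literals.
    println!("{}", state.out);
}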
- pub expression: Expression<'a>, - pub conversion: Option<&'a str>, - pub format_spec: Option>>, - pub whitespace_before_expression: ParenthesizableWhitespace<'a>, - pub whitespace_after_expression: ParenthesizableWhitespace<'a>, - pub equal: Option>, - - pub(crate) lbrace_tok: TokenRef<'a>, - // This is None if there's an equal sign, otherwise it's the first token of - // (conversion, format spec, right brace) in that order - pub(crate) after_expr_tok: Option>, -} - -impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringExpression<'r, 'a> { - type Inflated = TemplatedStringExpression<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let whitespace_before_expression = parse_parenthesizable_whitespace( - config, - &mut (*self.lbrace_tok).whitespace_after.borrow_mut(), - )?; - let expression = self.expression.inflate(config)?; - let equal = self.equal.inflate(config)?; - let whitespace_after_expression = if let Some(after_expr_tok) = self.after_expr_tok.as_mut() - { - parse_parenthesizable_whitespace( - config, - &mut after_expr_tok.whitespace_before.borrow_mut(), - )? - } else { - Default::default() - }; - let format_spec = self.format_spec.inflate(config)?; - Ok(Self::Inflated { - expression, - conversion: self.conversion, - format_spec, - whitespace_before_expression, - whitespace_after_expression, - equal, - }) - } -} - -impl<'a> Codegen<'a> for TemplatedStringExpression<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - state.add_token("{"); - self.whitespace_before_expression.codegen(state); - self.expression.codegen(state); - if let Some(eq) = &self.equal { - eq.codegen(state); - } - self.whitespace_after_expression.codegen(state); - if let Some(conv) = &self.conversion { - state.add_token("!"); - state.add_token(conv); - } - if let Some(specs) = &self.format_spec { - state.add_token(":"); - for spec in specs { - spec.codegen(state); - } - } - state.add_token("}"); - } -} - -#[cst_node(ParenthesizedNode)] -pub struct TemplatedString<'a> { - pub parts: Vec>, - pub start: &'a str, - pub end: &'a str, - pub lpar: Vec>, - pub rpar: Vec>, -} - -impl<'r, 'a> Inflate<'a> for DeflatedTemplatedString<'r, 'a> { - type Inflated = TemplatedString<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let parts = self.parts.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - parts, - start: self.start, - end: self.end, - lpar, - rpar, - }) - } -} - -impl<'a> Codegen<'a> for TemplatedString<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - self.parenthesize(state, |state| { - state.add_token(self.start); - for part in &self.parts { - part.codegen(state); - } - state.add_token(self.end); - }) - } -} - -#[cst_node(Codegen, Inflate)] -pub enum TemplatedStringContent<'a> { - Text(TemplatedStringText<'a>), - Expression(Box>), -} -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct FormattedStringText<'a> { pub value: &'a str, } -impl<'r, 'a> Inflate<'a> for DeflatedFormattedStringText<'r, 'a> { - type Inflated = FormattedStringText<'a>; - fn inflate(self, _config: &Config<'a>) -> Result { - Ok(Self::Inflated { value: self.value }) +impl<'a> Inflate<'a> for FormattedStringText<'a> { + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(self) } } @@ -2491,14 +2096,7 @@ impl<'a> Codegen<'a> for FormattedStringText<'a> { } } -pub(crate) fn make_fstringtext<'r, 'a>(value: &'a str) -> DeflatedFormattedStringText<'r, 'a> { - DeflatedFormattedStringText { - value, - _phantom: 
Default::default(), - } -} - -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct FormattedStringExpression<'a> { pub expression: Expression<'a>, pub conversion: Option<&'a str>, @@ -2513,33 +2111,22 @@ pub struct FormattedStringExpression<'a> { pub(crate) after_expr_tok: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedFormattedStringExpression<'r, 'a> { - type Inflated = FormattedStringExpression<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let whitespace_before_expression = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for FormattedStringExpression<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_expression = parse_parenthesizable_whitespace( config, &mut (*self.lbrace_tok).whitespace_after.borrow_mut(), )?; - let expression = self.expression.inflate(config)?; - let equal = self.equal.inflate(config)?; - let whitespace_after_expression = if let Some(after_expr_tok) = self.after_expr_tok.as_mut() - { - parse_parenthesizable_whitespace( + self.expression = self.expression.inflate(config)?; + self.equal = self.equal.inflate(config)?; + if let Some(after_expr_tok) = self.after_expr_tok.as_mut() { + self.whitespace_after_expression = parse_parenthesizable_whitespace( config, &mut after_expr_tok.whitespace_before.borrow_mut(), - )? - } else { - Default::default() - }; - let format_spec = self.format_spec.inflate(config)?; - Ok(Self::Inflated { - expression, - conversion: self.conversion, - format_spec, - whitespace_before_expression, - whitespace_after_expression, - equal, - }) + )?; + } + self.format_spec = self.format_spec.inflate(config)?; + Ok(self) } } @@ -2566,13 +2153,14 @@ impl<'a> Codegen<'a> for FormattedStringExpression<'a> { } } -#[cst_node(Codegen, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, Inflate, IntoPy)] pub enum FormattedStringContent<'a> { Text(FormattedStringText<'a>), - Expression(Box>), + Expression(FormattedStringExpression<'a>), } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct FormattedString<'a> { pub parts: Vec>, pub start: &'a str, @@ -2581,19 +2169,12 @@ pub struct FormattedString<'a> { pub rpar: Vec>, } -impl<'r, 'a> Inflate<'a> for DeflatedFormattedString<'r, 'a> { - type Inflated = FormattedString<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let parts = self.parts.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - parts, - start: self.start, - end: self.end, - lpar, - rpar, - }) +impl<'a> Inflate<'a> for FormattedString<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.parts = self.parts.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } @@ -2609,7 +2190,7 @@ impl<'a> Codegen<'a> for FormattedString<'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, ParenthesizedNode, IntoPy)] pub struct NamedExpr<'a> { pub target: Box>, pub value: Box>, @@ -2634,112 +2215,20 @@ impl<'a> Codegen<'a> for NamedExpr<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedNamedExpr<'r, 'a> { - type Inflated = NamedExpr<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let target = self.target.inflate(config)?; - let whitespace_before_walrus = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for NamedExpr<'a> { + fn inflate(mut self, 
config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.target = self.target.inflate(config)?; + self.whitespace_before_walrus = parse_parenthesizable_whitespace( config, &mut self.walrus_tok.whitespace_before.borrow_mut(), )?; - let whitespace_after_walrus = parse_parenthesizable_whitespace( + self.whitespace_after_walrus = parse_parenthesizable_whitespace( config, &mut self.walrus_tok.whitespace_after.borrow_mut(), )?; - let value = self.value.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - target, - value, - lpar, - rpar, - whitespace_before_walrus, - whitespace_after_walrus, - }) - } -} - -#[cfg(feature = "py")] -mod py { - - use pyo3::types::PyAnyMethods; - use pyo3::types::PyModule; - - use super::*; - use crate::nodes::traits::py::TryIntoPy; - - // TODO: this could be a derive helper attribute to override the python class name - impl<'a> TryIntoPy> for Element<'a> { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult> { - match self { - Self::Starred(s) => s.try_into_py(py), - Self::Simple { value, comma } => { - let libcst = PyModule::import(py, "libcst")?; - let kwargs = [ - Some(("value", value.try_into_py(py)?)), - comma - .map(|x| x.try_into_py(py)) - .transpose()? - .map(|x| ("comma", x)), - ] - .iter() - .filter(|x| x.is_some()) - .map(|x| x.as_ref().unwrap()) - .collect::>() - .into_py_dict(py)?; - Ok(libcst - .getattr("Element") - .expect("no Element found in libcst") - .call((), Some(&kwargs))? - .into()) - } - } - } - } - - // TODO: this could be a derive helper attribute to override the python class name - impl<'a> TryIntoPy> for DictElement<'a> { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult> { - match self { - Self::Starred(s) => s.try_into_py(py), - Self::Simple { - key, - value, - comma, - whitespace_after_colon, - whitespace_before_colon, - .. - } => { - let libcst = PyModule::import(py, "libcst")?; - let kwargs = [ - Some(("key", key.try_into_py(py)?)), - Some(("value", value.try_into_py(py)?)), - Some(( - "whitespace_before_colon", - whitespace_before_colon.try_into_py(py)?, - )), - Some(( - "whitespace_after_colon", - whitespace_after_colon.try_into_py(py)?, - )), - comma - .map(|x| x.try_into_py(py)) - .transpose()? - .map(|x| ("comma", x)), - ] - .iter() - .filter(|x| x.is_some()) - .map(|x| x.as_ref().unwrap()) - .collect::>() - .into_py_dict(py)?; - Ok(libcst - .getattr("DictElement") - .expect("no Element found in libcst") - .call((), Some(&kwargs))? - .into()) - } - } - } + self.value = self.value.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } diff --git a/native/libcst/src/nodes/inflate_helpers.rs b/native/libcst/src/nodes/inflate_helpers.rs index 8bf5c8af..262de5ac 100644 --- a/native/libcst/src/nodes/inflate_helpers.rs +++ b/native/libcst/src/nodes/inflate_helpers.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst/src/nodes/macros.rs b/native/libcst/src/nodes/macros.rs index 11028b8c..76498b8e 100644 --- a/native/libcst/src/nodes/macros.rs +++ b/native/libcst/src/nodes/macros.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
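// Editor's note: a minimal sketch (not part of the patch) of the inflate style this
// diff converges on throughout expression.rs: `inflate` consumes the node by value,
// fills in its whitespace fields from the surrounding config, and returns the same
// value, rather than assembling a separate inflated struct field by field.
// `MiniConfig`, `MiniComma`, and `MiniInflate` are hypothetical stand-ins for libcst's
// Config, Comma, and Inflate; error handling is reduced to a plain Result.
type MiniResult<T> = Result<T, String>;

struct MiniConfig {
    /// Whitespace the tokenizer recorded around the current token.
    recorded_whitespace: &'static str,
}

trait MiniInflate: Sized {
    fn inflate(self, config: &MiniConfig) -> MiniResult<Self>;
}

#[derive(Debug, Default)]
struct MiniComma {
    whitespace_before: String,
    whitespace_after: String,
}

impl MiniInflate for MiniComma {
    // Mirrors the `fn inflate(mut self, ...) -> Result<Self>` shape used above:
    // mutate the fields in place, then hand the same value back.
    fn inflate(mut self, config: &MiniConfig) -> MiniResult<Self> {
        self.whitespace_before = config.recorded_whitespace.to_string();
        self.whitespace_after = config.recorded_whitespace.to_string();
        Ok(self)
    }
}

fn main() -> MiniResult<()> {
    let config = MiniConfig { recorded_whitespace: " " };
    let comma = MiniComma::default().inflate(&config)?;
    println!("{:?}", comma);
    Ok(())
}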
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. @@ -17,8 +17,8 @@ macro_rules! py_import { ( $module_name:expr, $member_name:expr, $getter_fn:ident ) => { paste::paste! { static [] - : pyo3::once_cell::PyOnceLock>> - = pyo3::once_cell::PyOnceLock::new(); + : pyo3::once_cell::GILOnceCell> + = pyo3::once_cell::GILOnceCell::new(); fn $getter_fn<'py>(py: pyo3::Python<'py>) -> pyo3::PyResult<&'py pyo3::PyAny> { Ok([].get_or_init(py, || { diff --git a/native/libcst/src/nodes/mod.rs b/native/libcst/src/nodes/mod.rs index e044db94..b6be09df 100644 --- a/native/libcst/src/nodes/mod.rs +++ b/native/libcst/src/nodes/mod.rs @@ -1,14 +1,14 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree -pub(crate) mod whitespace; +mod whitespace; pub use whitespace::{ Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace, SimpleWhitespace, TrailingWhitespace, }; -pub(crate) mod statement; +mod statement; pub use statement::{ AnnAssign, Annotation, AsName, Assert, Assign, AssignTarget, AssignTargetExpression, AugAssign, Break, ClassDef, CompoundStatement, Continue, Decorator, Del, DelTargetExpression, Else, @@ -18,11 +18,10 @@ pub use statement::{ MatchPattern, MatchSequence, MatchSequenceElement, MatchSingleton, MatchStar, MatchTuple, MatchValue, NameItem, Nonlocal, OrElse, Pass, Raise, Return, SimpleStatementLine, SimpleStatementSuite, SmallStatement, StarrableMatchSequenceElement, Statement, Suite, Try, - TryStar, TypeAlias, TypeParam, TypeParameters, TypeVar, TypeVarLike, TypeVarTuple, While, With, - WithItem, + TryStar, While, With, WithItem, }; -pub(crate) mod expression; +mod expression; pub use expression::{ Arg, Asynchronous, Attribute, Await, BaseSlice, BinaryOperation, BooleanOperation, Call, CompFor, CompIf, Comparison, ComparisonTarget, ConcatenatedString, Dict, DictComp, DictElement, @@ -31,101 +30,22 @@ pub use expression::{ Integer, Lambda, LeftCurlyBrace, LeftParen, LeftSquareBracket, List, ListComp, Name, NameOrAttribute, NamedExpr, Param, ParamSlash, ParamStar, Parameters, RightCurlyBrace, RightParen, RightSquareBracket, Set, SetComp, SimpleString, Slice, StarArg, StarredDictElement, - StarredElement, String, Subscript, SubscriptElement, TemplatedString, TemplatedStringContent, - TemplatedStringExpression, Tuple, UnaryOperation, Yield, YieldValue, + StarredElement, String, Subscript, SubscriptElement, Tuple, UnaryOperation, Yield, YieldValue, }; -pub(crate) mod op; +mod op; pub use op::{ AssignEqual, AugOp, BinaryOp, BitOr, BooleanOp, Colon, Comma, CompOp, Dot, ImportStar, Semicolon, UnaryOp, }; -pub(crate) mod module; +mod module; pub use module::Module; mod codegen; pub use codegen::{Codegen, CodegenState}; -pub(crate) mod traits; +mod traits; pub use traits::{Inflate, ParenthesizedNode, WithComma, WithLeadingLines}; pub(crate) mod inflate_helpers; - -pub(crate) mod deflated { - pub use super::expression::{ - DeflatedArg as Arg, DeflatedAsynchronous as Asynchronous, DeflatedAttribute as Attribute, - DeflatedAwait as Await, DeflatedBaseSlice as BaseSlice, - DeflatedBinaryOperation as BinaryOperation, DeflatedBooleanOperation as BooleanOperation, - DeflatedCall as Call, DeflatedCompFor as CompFor, DeflatedCompIf as CompIf, - DeflatedComparison as Comparison, DeflatedComparisonTarget as 
ComparisonTarget,
-        DeflatedConcatenatedString as ConcatenatedString, DeflatedDict as Dict,
-        DeflatedDictComp as DictComp, DeflatedDictElement as DictElement,
-        DeflatedElement as Element, DeflatedEllipsis as Ellipsis, DeflatedExpression as Expression,
-        DeflatedFloat as Float, DeflatedFormattedString as FormattedString,
-        DeflatedFormattedStringContent as FormattedStringContent,
-        DeflatedFormattedStringExpression as FormattedStringExpression,
-        DeflatedFormattedStringText as FormattedStringText, DeflatedFrom as From,
-        DeflatedGeneratorExp as GeneratorExp, DeflatedIfExp as IfExp,
-        DeflatedImaginary as Imaginary, DeflatedIndex as Index, DeflatedInteger as Integer,
-        DeflatedLambda as Lambda, DeflatedLeftCurlyBrace as LeftCurlyBrace,
-        DeflatedLeftParen as LeftParen, DeflatedLeftSquareBracket as LeftSquareBracket,
-        DeflatedList as List, DeflatedListComp as ListComp, DeflatedName as Name,
-        DeflatedNameOrAttribute as NameOrAttribute, DeflatedNamedExpr as NamedExpr,
-        DeflatedParam as Param, DeflatedParamSlash as ParamSlash, DeflatedParamStar as ParamStar,
-        DeflatedParameters as Parameters, DeflatedRightCurlyBrace as RightCurlyBrace,
-        DeflatedRightParen as RightParen, DeflatedRightSquareBracket as RightSquareBracket,
-        DeflatedSet as Set, DeflatedSetComp as SetComp, DeflatedSimpleString as SimpleString,
-        DeflatedSlice as Slice, DeflatedStarArg as StarArg,
-        DeflatedStarredDictElement as StarredDictElement, DeflatedStarredElement as StarredElement,
-        DeflatedString as String, DeflatedSubscript as Subscript,
-        DeflatedSubscriptElement as SubscriptElement, DeflatedTemplatedString as TemplatedString,
-        DeflatedTemplatedStringContent as TemplatedStringContent,
-        DeflatedTemplatedStringExpression as TemplatedStringExpression,
-        DeflatedTemplatedStringText as TemplatedStringText, DeflatedTuple as Tuple,
-        DeflatedUnaryOperation as UnaryOperation, DeflatedYield as Yield,
-        DeflatedYieldValue as YieldValue,
-    };
-    pub use super::module::DeflatedModule as Module;
-    pub use super::op::{
-        DeflatedAssignEqual as AssignEqual, DeflatedAugOp as AugOp, DeflatedBinaryOp as BinaryOp,
-        DeflatedBitOr as BitOr, DeflatedBooleanOp as BooleanOp, DeflatedColon as Colon,
-        DeflatedComma as Comma, DeflatedCompOp as CompOp, DeflatedDot as Dot,
-        DeflatedImportStar as ImportStar, DeflatedSemicolon as Semicolon,
-        DeflatedUnaryOp as UnaryOp,
-    };
-    pub use super::statement::{
-        DeflatedAnnAssign as AnnAssign, DeflatedAnnotation as Annotation, DeflatedAsName as AsName,
-        DeflatedAssert as Assert, DeflatedAssign as Assign, DeflatedAssignTarget as AssignTarget,
-        DeflatedAssignTargetExpression as AssignTargetExpression, DeflatedAugAssign as AugAssign,
-        DeflatedBreak as Break, DeflatedClassDef as ClassDef,
-        DeflatedCompoundStatement as CompoundStatement, DeflatedContinue as Continue,
-        DeflatedDecorator as Decorator, DeflatedDel as Del,
-        DeflatedDelTargetExpression as DelTargetExpression, DeflatedElse as Else,
-        DeflatedExceptHandler as ExceptHandler, DeflatedExceptStarHandler as ExceptStarHandler,
-        DeflatedExpr as Expr, DeflatedFinally as Finally, DeflatedFor as For,
-        DeflatedFunctionDef as FunctionDef, DeflatedGlobal as Global, DeflatedIf as If,
-        DeflatedImport as Import, DeflatedImportAlias as ImportAlias,
-        DeflatedImportFrom as ImportFrom, DeflatedImportNames as ImportNames,
-        DeflatedIndentedBlock as IndentedBlock, DeflatedMatch as Match, DeflatedMatchAs as MatchAs,
-        DeflatedMatchCase as MatchCase, DeflatedMatchClass as MatchClass,
-        DeflatedMatchKeywordElement as MatchKeywordElement, DeflatedMatchList as
MatchList, - DeflatedMatchMapping as MatchMapping, DeflatedMatchMappingElement as MatchMappingElement, - DeflatedMatchOr as MatchOr, DeflatedMatchOrElement as MatchOrElement, - DeflatedMatchPattern as MatchPattern, DeflatedMatchSequence as MatchSequence, - DeflatedMatchSequenceElement as MatchSequenceElement, - DeflatedMatchSingleton as MatchSingleton, DeflatedMatchStar as MatchStar, - DeflatedMatchTuple as MatchTuple, DeflatedMatchValue as MatchValue, - DeflatedNameItem as NameItem, DeflatedNonlocal as Nonlocal, DeflatedOrElse as OrElse, - DeflatedParamSpec as ParamSpec, DeflatedPass as Pass, DeflatedRaise as Raise, - DeflatedReturn as Return, DeflatedSimpleStatementLine as SimpleStatementLine, - DeflatedSimpleStatementSuite as SimpleStatementSuite, - DeflatedSmallStatement as SmallStatement, - DeflatedStarrableMatchSequenceElement as StarrableMatchSequenceElement, - DeflatedStatement as Statement, DeflatedSuite as Suite, DeflatedTry as Try, - DeflatedTryStar as TryStar, DeflatedTypeAlias as TypeAlias, DeflatedTypeParam as TypeParam, - DeflatedTypeParameters as TypeParameters, DeflatedTypeVar as TypeVar, - DeflatedTypeVarLike as TypeVarLike, DeflatedTypeVarTuple as TypeVarTuple, - DeflatedWhile as While, DeflatedWith as With, DeflatedWithItem as WithItem, - }; -} diff --git a/native/libcst/src/nodes/module.rs b/native/libcst/src/nodes/module.rs index ff9a2a73..03c6afb5 100644 --- a/native/libcst/src/nodes/module.rs +++ b/native/libcst/src/nodes/module.rs @@ -1,29 +1,28 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. use std::mem::swap; +use std::rc::Rc; use crate::tokenizer::whitespace_parser::parse_empty_lines; use crate::tokenizer::Token; use crate::{ nodes::{ codegen::{Codegen, CodegenState}, - statement::*, + statement::Statement, whitespace::EmptyLine, }, tokenizer::whitespace_parser::Config, }; -use libcst_derive::cst_node; -#[cfg(feature = "py")] -use libcst_derive::TryIntoPy; +use libcst_derive::IntoPy; use super::traits::{Inflate, Result, WithLeadingLines}; -type TokenRef<'r, 'a> = &'r Token<'a>; +type TokenRef<'a> = Rc>; -#[cst_node] +#[derive(Debug, Eq, PartialEq, IntoPy)] pub struct Module<'a> { pub body: Vec>, pub header: Vec>, @@ -51,20 +50,19 @@ impl<'a> Codegen<'a> for Module<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedModule<'r, 'a> { - type Inflated = Module<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let default_indent = config.default_indent; - let default_newline = config.default_newline; - let has_trailing_newline = config.has_trailing_newline(); - let mut body = self.body.inflate(config)?; +impl<'a> Inflate<'a> for Module<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.default_indent = config.default_indent; + self.default_newline = config.default_newline; + self.has_trailing_newline = config.has_trailing_newline(); + self.body = self.body.inflate(config)?; let mut footer = parse_empty_lines( config, &mut (*self.eof_tok).whitespace_before.borrow_mut(), Some(""), )?; let mut header = vec![]; - if let Some(stmt) = body.first_mut() { + if let Some(stmt) = self.body.first_mut() { swap(stmt.leading_lines(), &mut header); let mut last_indented = None; for (num, line) in footer.iter().enumerate() { @@ -77,20 +75,18 @@ impl<'r, 'a> Inflate<'a> for DeflatedModule<'r, 'a> { } } if let Some(num) = last_indented { - let (_, rest) = 
footer.split_at(num); - footer = rest.to_vec(); + if num + 1 == footer.len() { + footer = vec![]; + } else { + let (_, rest) = footer.split_at(num + 1); + footer = rest.to_vec(); + } } } else { swap(&mut header, &mut footer); } - Ok(Self::Inflated { - body, - header, - footer, - default_indent, - default_newline, - has_trailing_newline, - encoding: self.encoding, - }) + self.footer = footer; + self.header = header; + Ok(self) } } diff --git a/native/libcst/src/nodes/op.rs b/native/libcst/src/nodes/op.rs index 3e02483e..48b9839f 100644 --- a/native/libcst/src/nodes/op.rs +++ b/native/libcst/src/nodes/op.rs @@ -1,8 +1,10 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. +use std::rc::Rc; + use super::{whitespace::ParenthesizableWhitespace, Codegen, CodegenState}; use crate::{ nodes::traits::{Inflate, Result}, @@ -11,20 +13,18 @@ use crate::{ Token, }, }; -use libcst_derive::cst_node; -#[cfg(feature = "py")] -use libcst_derive::TryIntoPy; +use libcst_derive::IntoPy; -type TokenRef<'r, 'a> = &'r Token<'a>; +type TokenRef<'a> = Rc>; -#[cst_node] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct Semicolon<'a> { /// Any space that appears directly before this semicolon. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this semicolon. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub(crate) tok: TokenRef<'a>, } @@ -36,30 +36,26 @@ impl<'a> Codegen<'a> for Semicolon<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedSemicolon<'r, 'a> { - type Inflated = Semicolon<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = ParenthesizableWhitespace::SimpleWhitespace( +impl<'a> Inflate<'a> for Semicolon<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = ParenthesizableWhitespace::SimpleWhitespace( parse_simple_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut())?, ); - let whitespace_after = ParenthesizableWhitespace::SimpleWhitespace( + self.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace( parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?, ); - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Comma<'a> { /// Any space that appears directly before this comma. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this comma. 
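// Editor's note: a small, self-contained sketch (not part of the patch) of the
// footer-splitting rule adjusted above in Module::inflate: of the empty lines parsed
// before EOF, only those strictly after the last indented one remain in the module
// footer; everything up to and including that line belongs to the preceding block.
// `MiniEmptyLine` and its `indented` flag are hypothetical stand-ins for libcst's
// EmptyLine and whatever predicate the real loop applies.
#[derive(Debug, Clone)]
struct MiniEmptyLine {
    /// True when the blank line still carries block indentation.
    indented: bool,
}

fn split_footer(footer: Vec<MiniEmptyLine>) -> Vec<MiniEmptyLine> {
    // Index of the last indented empty line, as the enumerate loop above computes.
    let last_indented = footer
        .iter()
        .enumerate()
        .filter(|(_, line)| line.indented)
        .map(|(num, _)| num)
        .last();
    match last_indented {
        // Keep only the lines strictly after it; `num + 1` mirrors the
        // `split_at(num + 1)` call on the `+` side of this hunk.
        Some(num) if num + 1 < footer.len() => footer[num + 1..].to_vec(),
        Some(_) => vec![],
        None => footer,
    }
}

fn main() {
    let footer = vec![
        MiniEmptyLine { indented: true },
        MiniEmptyLine { indented: false },
        MiniEmptyLine { indented: false },
    ];
    // The two unindented trailing lines survive as the real module footer.
    println!("{:?}", split_footer(footer));
}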
pub whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub(crate) tok: TokenRef<'a>, } @@ -71,46 +67,38 @@ impl<'a> Codegen<'a> for Comma<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedComma<'r, 'a> { - type Inflated = Comma<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Comma<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - let whitespace_after = parse_parenthesizable_whitespace( + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) + Ok(self) } } -impl<'r, 'a> DeflatedComma<'r, 'a> { - pub fn inflate_before(self, config: &Config<'a>) -> Result> { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Comma<'a> { + pub fn inflate_before(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - let whitespace_after = Default::default(); - Ok(Comma { - whitespace_before, - whitespace_after, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AssignEqual<'a> { /// Any space that appears directly before this equal sign. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this equal sign. pub whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub(crate) tok: TokenRef<'a>, } @@ -122,32 +110,28 @@ impl<'a> Codegen<'a> for AssignEqual<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAssignEqual<'r, 'a> { - type Inflated = AssignEqual<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for AssignEqual<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - let whitespace_after = parse_parenthesizable_whitespace( + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct Dot<'a> { /// Any space that appears directly before this dot. pub whitespace_before: ParenthesizableWhitespace<'a>, /// Any space that appears directly after this dot. 
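// Editor's note: a minimal sketch (not part of the patch) of why the inflate code in
// this file keeps calling `(*self.tok).whitespace_before.borrow_mut()`: several nodes
// can hold the same token, so the token's whitespace slots sit behind RefCell and the
// token itself is shared, here via Rc in the spirit of the `TokenRef` alias on the `+`
// side of this diff. `MiniToken` and `consume_whitespace` are hypothetical stand-ins
// for libcst's Token and its whitespace parser.
use std::cell::RefCell;
use std::rc::Rc;

#[derive(Debug)]
struct MiniToken {
    string: &'static str,
    /// Whitespace recorded before the token; in this sketch it is taken exactly once.
    whitespace_before: RefCell<String>,
}

type MiniTokenRef = Rc<MiniToken>;

/// Moves the recorded whitespace out of the token so no other node re-consumes it.
fn consume_whitespace(tok: &MiniTokenRef) -> String {
    std::mem::take(&mut *tok.whitespace_before.borrow_mut())
}

fn main() {
    let tok: MiniTokenRef = Rc::new(MiniToken {
        string: ",",
        whitespace_before: RefCell::new("  ".to_string()),
    });
    // Two nodes may hold clones of the same Rc; in this sketch, whichever
    // inflates first takes ownership of the recorded whitespace.
    let first = consume_whitespace(&tok);
    let second = consume_whitespace(&tok);
    println!("token {:?}: first={:?}, second={:?}", tok.string, first, second);
}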
pub whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub(crate) tok: TokenRef<'a>, } @@ -159,70 +143,67 @@ impl<'a> Codegen<'a> for Dot<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedDot<'r, 'a> { - type Inflated = Dot<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let whitespace_before = self.inflate_before(config)?; - let whitespace_after = self.inflate_after(config)?; - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) +impl<'a> Inflate<'a> for Dot<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.inflate_before(config)?; + self.inflate_after(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedDot<'r, 'a> { - fn inflate_before(&mut self, config: &Config<'a>) -> Result> { - parse_parenthesizable_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut()) +impl<'a> Dot<'a> { + fn inflate_before(&mut self, config: &Config<'a>) -> Result<()> { + self.whitespace_before = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_before.borrow_mut(), + )?; + Ok(()) } - fn inflate_after(&mut self, config: &Config<'a>) -> Result> { - parse_parenthesizable_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut()) + fn inflate_after(&mut self, config: &Config<'a>) -> Result<()> { + self.whitespace_after = parse_parenthesizable_whitespace( + config, + &mut (*self.tok).whitespace_after.borrow_mut(), + )?; + Ok(()) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ImportStar {} -pub(crate) fn make_importstar<'r, 'a>() -> DeflatedImportStar<'r, 'a> { - DeflatedImportStar { - _phantom: Default::default(), - } -} - impl<'a> Codegen<'a> for ImportStar { fn codegen(&self, state: &mut CodegenState<'a>) { state.add_token("*"); } } -impl<'r, 'a> Inflate<'a> for DeflatedImportStar<'r, 'a> { - type Inflated = ImportStar; - fn inflate(self, _config: &Config<'a>) -> Result { - Ok(ImportStar {}) +impl<'a> Inflate<'a> for ImportStar { + fn inflate(self, _config: &Config<'a>) -> Result { + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum UnaryOp<'a> { Plus { whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Minus { whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitInvert { whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Not { whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, } @@ -248,54 +229,65 @@ impl<'a> Codegen<'a> for UnaryOp<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedUnaryOp<'r, 'a> { - type Inflated = UnaryOp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for UnaryOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::Plus { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Plus { whitespace_after } + Self::Plus { + whitespace_after, + tok, + } } Self::Minus { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Minus { whitespace_after } + Self::Minus { + whitespace_after, + tok, + } } Self::BitInvert { tok, .. 
} => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitInvert { whitespace_after } + Self::BitInvert { + whitespace_after, + tok, + } } Self::Not { tok, .. } => { let whitespace_after = parse_parenthesizable_whitespace( config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Not { whitespace_after } + Self::Not { + whitespace_after, + tok, + } } }) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum BooleanOp<'a> { And { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Or { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, } @@ -320,9 +312,8 @@ impl<'a> Codegen<'a> for BooleanOp<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedBooleanOp<'r, 'a> { - type Inflated = BooleanOp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for BooleanOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::And { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -333,9 +324,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBooleanOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::And { + Self::And { whitespace_before, whitespace_after, + tok, } } Self::Or { tok, .. } => { @@ -347,177 +339,175 @@ impl<'r, 'a> Inflate<'a> for DeflatedBooleanOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Or { + Self::Or { whitespace_before, whitespace_after, + tok, } } }) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum BinaryOp<'a> { Add { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Subtract { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Multiply { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Divide { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, FloorDivide { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Modulo { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Power { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, LeftShift { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, RightShift { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitOr { whitespace_before: 
ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitAnd { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitXor { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, MatrixMultiply { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, } impl<'a> Codegen<'a> for BinaryOp<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { - let (whitespace_before, whitespace_after) = match self { + let (whitespace_before, whitespace_after, tok) = match self { Self::Add { whitespace_before, whitespace_after, + tok, } | Self::Subtract { whitespace_before, whitespace_after, + tok, } | Self::Multiply { whitespace_before, whitespace_after, + tok, } | Self::Divide { whitespace_before, whitespace_after, + tok, } | Self::FloorDivide { whitespace_before, whitespace_after, + tok, } | Self::Modulo { whitespace_before, whitespace_after, + tok, } | Self::Power { whitespace_before, whitespace_after, + tok, } | Self::LeftShift { whitespace_before, whitespace_after, + tok, } | Self::RightShift { whitespace_before, whitespace_after, + tok, } | Self::BitOr { whitespace_before, whitespace_after, + tok, } | Self::BitAnd { whitespace_before, whitespace_after, + tok, } | Self::BitXor { whitespace_before, whitespace_after, + tok, } | Self::MatrixMultiply { whitespace_before, whitespace_after, - } => (whitespace_before, whitespace_after), - }; - let tok = match self { - BinaryOp::Add { .. } => "+", - BinaryOp::Subtract { .. } => "-", - BinaryOp::Multiply { .. } => "*", - BinaryOp::Divide { .. } => "/", - BinaryOp::FloorDivide { .. } => "//", - BinaryOp::Modulo { .. } => "%", - BinaryOp::Power { .. } => "**", - BinaryOp::LeftShift { .. } => "<<", - BinaryOp::RightShift { .. } => ">>", - BinaryOp::BitOr { .. } => "|", - BinaryOp::BitAnd { .. } => "&", - BinaryOp::BitXor { .. } => "^", - BinaryOp::MatrixMultiply { .. } => "@", + tok, + } => (whitespace_before, whitespace_after, tok), }; whitespace_before.codegen(state); - state.add_token(tok); + state.add_token(tok.string); whitespace_after.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { - type Inflated = BinaryOp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for BinaryOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::Add { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -528,9 +518,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Add { + Self::Add { whitespace_before, whitespace_after, + tok, } } Self::Subtract { tok, .. } => { @@ -542,9 +533,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Subtract { + Self::Subtract { whitespace_before, whitespace_after, + tok, } } Self::Multiply { tok, .. 
} => { @@ -556,9 +548,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Multiply { + Self::Multiply { whitespace_before, whitespace_after, + tok, } } Self::Divide { tok, .. } => { @@ -570,9 +563,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Divide { + Self::Divide { whitespace_before, whitespace_after, + tok, } } Self::FloorDivide { tok, .. } => { @@ -584,9 +578,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::FloorDivide { + Self::FloorDivide { whitespace_before, whitespace_after, + tok, } } Self::Modulo { tok, .. } => { @@ -598,9 +593,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Modulo { + Self::Modulo { whitespace_before, whitespace_after, + tok, } } Self::Power { tok, .. } => { @@ -612,9 +608,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Power { + Self::Power { whitespace_before, whitespace_after, + tok, } } Self::LeftShift { tok, .. } => { @@ -626,9 +623,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::LeftShift { + Self::LeftShift { whitespace_before, whitespace_after, + tok, } } Self::RightShift { tok, .. } => { @@ -640,9 +638,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::RightShift { + Self::RightShift { whitespace_before, whitespace_after, + tok, } } Self::BitOr { tok, .. } => { @@ -654,9 +653,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitOr { + Self::BitOr { whitespace_before, whitespace_after, + tok, } } Self::BitAnd { tok, .. } => { @@ -668,9 +668,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitAnd { + Self::BitAnd { whitespace_before, whitespace_after, + tok, } } Self::BitXor { tok, .. } => { @@ -682,9 +683,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitXor { + Self::BitXor { whitespace_before, whitespace_after, + tok, } } Self::MatrixMultiply { tok, .. 
} => { @@ -696,164 +698,166 @@ impl<'r, 'a> Inflate<'a> for DeflatedBinaryOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::MatrixMultiply { + Self::MatrixMultiply { whitespace_before, whitespace_after, + tok, } } }) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum CompOp<'a> { LessThan { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, GreaterThan { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, LessThanEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, GreaterThanEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, Equal { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, NotEqual { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, In { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, NotIn { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_between: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] not_tok: TokenRef<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] in_tok: TokenRef<'a>, }, Is { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, IsNot { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_between: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] is_tok: TokenRef<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] not_tok: TokenRef<'a>, }, } impl<'a> Codegen<'a> for CompOp<'a> { fn codegen(&self, state: &mut CodegenState<'a>) { - let (bef, aft, between) = match self { + let (bef, aft, first_tok, between) = match self { Self::LessThan { whitespace_before, whitespace_after, + tok, } | Self::GreaterThan { whitespace_before, whitespace_after, + tok, } | Self::LessThanEqual { whitespace_before, whitespace_after, + tok, } | Self::GreaterThanEqual { whitespace_before, whitespace_after, + tok, } | Self::Equal { whitespace_before, whitespace_after, + tok, } | Self::NotEqual { whitespace_before, whitespace_after, + tok, } | Self::In { whitespace_before, whitespace_after, + tok, } | Self::Is { whitespace_before, whitespace_after, - } => (whitespace_before, whitespace_after, None), + tok, + } => (whitespace_before, whitespace_after, tok, None), Self::IsNot { whitespace_before, whitespace_between, whitespace_after, + is_tok, + not_tok, } => ( whitespace_before, whitespace_after, - Some(whitespace_between), + is_tok, + Some((whitespace_between, not_tok)), ), Self::NotIn { whitespace_before, whitespace_between, whitespace_after, + not_tok, + in_tok, } => 
( whitespace_before, whitespace_after, - Some(whitespace_between), + not_tok, + Some((whitespace_between, in_tok)), ), }; - let (first_tok, second_tok) = match self { - CompOp::LessThan { .. } => ("<", None), - CompOp::GreaterThan { .. } => (">", None), - CompOp::LessThanEqual { .. } => ("<=", None), - CompOp::GreaterThanEqual { .. } => (">=", None), - CompOp::Equal { .. } => ("==", None), - CompOp::NotEqual { .. } => ("!=", None), - CompOp::In { .. } => ("in", None), - CompOp::NotIn { .. } => ("not", Some("in")), - CompOp::Is { .. } => ("is", None), - CompOp::IsNot { .. } => ("is", Some("not")), - }; bef.codegen(state); - state.add_token(first_tok); - if let (Some(btw), Some(second_tok)) = (between, second_tok) { + state.add_token(first_tok.string); + if let Some((btw, second_tok)) = between { btw.codegen(state); - state.add_token(second_tok); + state.add_token(second_tok.string); } aft.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { - type Inflated = CompOp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for CompOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::LessThan { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -864,9 +868,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::LessThan { + Self::LessThan { whitespace_before, whitespace_after, + tok, } } Self::GreaterThan { tok, .. } => { @@ -878,9 +883,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::GreaterThan { + Self::GreaterThan { whitespace_before, whitespace_after, + tok, } } Self::LessThanEqual { tok, .. } => { @@ -892,9 +898,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::LessThanEqual { + Self::LessThanEqual { whitespace_before, whitespace_after, + tok, } } Self::GreaterThanEqual { tok, .. } => { @@ -906,9 +913,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::GreaterThanEqual { + Self::GreaterThanEqual { whitespace_before, whitespace_after, + tok, } } Self::Equal { tok, .. } => { @@ -920,9 +928,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Equal { + Self::Equal { whitespace_before, whitespace_after, + tok, } } Self::NotEqual { tok, .. } => { @@ -934,9 +943,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::NotEqual { + Self::NotEqual { whitespace_before, whitespace_after, + tok, } } Self::In { tok, .. } => { @@ -948,9 +958,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::In { + Self::In { whitespace_before, whitespace_after, + tok, } } Self::Is { tok, .. 
} => { @@ -962,9 +973,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::Is { + Self::Is { whitespace_before, whitespace_after, + tok, } } Self::IsNot { @@ -982,10 +994,12 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*not_tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::IsNot { + Self::IsNot { whitespace_before, whitespace_between, whitespace_after, + is_tok, + not_tok, } } Self::NotIn { @@ -1003,40 +1017,38 @@ impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { config, &mut (*in_tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::NotIn { + Self::NotIn { whitespace_before, whitespace_between, whitespace_after, + not_tok, + in_tok, } } }) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Colon<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub(crate) tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedColon<'r, 'a> { - type Inflated = Colon<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Colon<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - let whitespace_after = parse_parenthesizable_whitespace( + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) + Ok(self) } } @@ -1048,91 +1060,90 @@ impl<'a> Codegen<'a> for Colon<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum AugOp<'a> { AddAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, SubtractAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, MultiplyAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, MatrixMultiplyAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, DivideAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, ModuloAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitAndAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitOrAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, BitXorAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: 
TokenRef<'a>, }, LeftShiftAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, RightShiftAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, PowerAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, FloorDivideAssign { whitespace_before: ParenthesizableWhitespace<'a>, whitespace_after: ParenthesizableWhitespace<'a>, - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] tok: TokenRef<'a>, }, } -impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { - type Inflated = AugOp<'a>; - fn inflate(self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for AugOp<'a> { + fn inflate(self, config: &Config<'a>) -> Result { Ok(match self { Self::AddAssign { tok, .. } => { let whitespace_before = parse_parenthesizable_whitespace( @@ -1143,9 +1154,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::AddAssign { + Self::AddAssign { whitespace_before, whitespace_after, + tok, } } Self::SubtractAssign { tok, .. } => { @@ -1157,9 +1169,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::SubtractAssign { + Self::SubtractAssign { whitespace_before, whitespace_after, + tok, } } Self::MultiplyAssign { tok, .. } => { @@ -1171,9 +1184,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::MultiplyAssign { + Self::MultiplyAssign { whitespace_before, whitespace_after, + tok, } } Self::MatrixMultiplyAssign { tok, .. } => { @@ -1185,9 +1199,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::MatrixMultiplyAssign { + Self::MatrixMultiplyAssign { whitespace_before, whitespace_after, + tok, } } Self::DivideAssign { tok, .. } => { @@ -1199,9 +1214,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::DivideAssign { + Self::DivideAssign { whitespace_before, whitespace_after, + tok, } } Self::ModuloAssign { tok, .. } => { @@ -1213,9 +1229,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::ModuloAssign { + Self::ModuloAssign { whitespace_before, whitespace_after, + tok, } } Self::BitAndAssign { tok, .. } => { @@ -1227,9 +1244,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitAndAssign { + Self::BitAndAssign { whitespace_before, whitespace_after, + tok, } } Self::BitOrAssign { tok, .. } => { @@ -1241,9 +1259,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitOrAssign { + Self::BitOrAssign { whitespace_before, whitespace_after, + tok, } } Self::BitXorAssign { tok, .. 
} => { @@ -1255,9 +1274,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::BitXorAssign { + Self::BitXorAssign { whitespace_before, whitespace_after, + tok, } } Self::LeftShiftAssign { tok, .. } => { @@ -1269,9 +1289,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::LeftShiftAssign { + Self::LeftShiftAssign { whitespace_before, whitespace_after, + tok, } } Self::RightShiftAssign { tok, .. } => { @@ -1283,9 +1304,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::RightShiftAssign { + Self::RightShiftAssign { whitespace_before, whitespace_after, + tok, } } Self::PowerAssign { tok, .. } => { @@ -1297,9 +1319,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::PowerAssign { + Self::PowerAssign { whitespace_before, whitespace_after, + tok, } } Self::FloorDivideAssign { tok, .. } => { @@ -1311,9 +1334,10 @@ impl<'r, 'a> Inflate<'a> for DeflatedAugOp<'r, 'a> { config, &mut (*tok).whitespace_after.borrow_mut(), )?; - Self::Inflated::FloorDivideAssign { + Self::FloorDivideAssign { whitespace_before, whitespace_after, + tok, } } }) @@ -1395,7 +1419,7 @@ impl<'a> Codegen<'a> for AugOp<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct BitOr<'a> { pub whitespace_before: ParenthesizableWhitespace<'a>, pub whitespace_after: ParenthesizableWhitespace<'a>, @@ -1403,21 +1427,17 @@ pub struct BitOr<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedBitOr<'r, 'a> { - type Inflated = BitOr<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for BitOr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?; - let whitespace_after = parse_parenthesizable_whitespace( + self.whitespace_after = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - Ok(Self::Inflated { - whitespace_before, - whitespace_after, - }) + Ok(self) } } diff --git a/native/libcst/src/nodes/parser_config.rs b/native/libcst/src/nodes/parser_config.rs index e274d8df..acf140ee 100644 --- a/native/libcst/src/nodes/parser_config.rs +++ b/native/libcst/src/nodes/parser_config.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. @@ -29,12 +29,12 @@ impl BaseWhitespaceParserConfig { } #[getter] - fn get_lines(&self, py: Python) -> Py { + fn get_lines(&self, py: Python) -> PyObject { self.lines.to_object(py) } #[getter] - fn get_default_newline(&self, py: Python) -> Py { + fn get_default_newline(&self, py: Python) -> PyObject { self.default_newline.to_object(py) } } @@ -62,23 +62,23 @@ impl BaseWhitespaceParserConfig { } } -// These fields are private and Py, since we don't currently care about using them from +// These fields are private and PyObject, since we don't currently care about using them from // within rust.
#[pyclass(extends=BaseWhitespaceParserConfig, module="libcst_native.parser_config")] #[text_signature = "(*, lines, encoding, default_indent, default_newline, has_trailing_newline, version, future_imports)"] pub struct ParserConfig { // lines is inherited #[pyo3(get)] - encoding: Py, + encoding: PyObject, #[pyo3(get)] - default_indent: Py, + default_indent: PyObject, // default_newline is inherited #[pyo3(get)] - has_trailing_newline: Py, + has_trailing_newline: PyObject, #[pyo3(get)] - version: Py, + version: PyObject, #[pyo3(get)] - future_imports: Py, + future_imports: PyObject, } #[pymethods] @@ -86,12 +86,12 @@ impl ParserConfig { #[new] fn new( lines: &PySequence, - encoding: Py, - default_indent: Py, + encoding: PyObject, + default_indent: PyObject, default_newline: &PyString, - has_trailing_newline: Py, - version: Py, - future_imports: Py, + has_trailing_newline: PyObject, + version: PyObject, + future_imports: PyObject, ) -> PyResult<(Self, BaseWhitespaceParserConfig)> { Ok(( Self { @@ -126,7 +126,6 @@ fn parser_config_asdict<'py>(py: Python<'py>, config: PyRef<'py, ParserConfig>) ("future_imports", config.future_imports.clone_ref(py)), ] .into_py_dict(py) - .unwrap() } pub fn init_module(_py: Python, m: &PyModule) -> PyResult<()> { diff --git a/native/libcst/src/nodes/py_cached.rs b/native/libcst/src/nodes/py_cached.rs index 307082da..0a7fe691 100644 --- a/native/libcst/src/nodes/py_cached.rs +++ b/native/libcst/src/nodes/py_cached.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. @@ -7,11 +7,11 @@ use pyo3::prelude::*; use std::convert::AsRef; use std::ops::Deref; -/// An immutable wrapper around a rust type T and its Py equivalent. Caches the conversion -/// to and from the Py. +/// An immutable wrapper around a rust type T and its PyObject equivalent. Caches the conversion +/// to and from the PyObject. pub struct PyCached { native: T, - py_object: Py, + py_object: PyObject, } impl PyCached where @@ -31,7 +31,7 @@ where T: FromPyObject<'source>, { fn extract(ob: &'source PyAny) -> PyResult { - Python::attach(|py| { + Python::with_gil(|py| { Ok(PyCached { native: ob.extract()?, py_object: ob.to_object(py), @@ -40,14 +40,14 @@ where } } -impl IntoPy> for PyCached { - fn into_py(self, _py: Python) -> Py { +impl IntoPy for PyCached { + fn into_py(self, _py: Python) -> PyObject { self.py_object } } impl ToPyObject for PyCached { - fn to_object(&self, py: Python) -> Py { + fn to_object(&self, py: Python) -> PyObject { self.py_object.clone_ref(py) } } @@ -71,6 +71,6 @@ where T: ToPyObject, { fn from(val: T) -> Self { - Python::attach(|py| Self::new(py, val)) + Python::with_gil(|py| Self::new(py, val)) } } diff --git a/native/libcst/src/nodes/statement.rs b/native/libcst/src/nodes/statement.rs index d40ef340..21725343 100644 --- a/native/libcst/src/nodes/statement.rs +++ b/native/libcst/src/nodes/statement.rs @@ -1,9 +1,9 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree.
-use std::mem::swap; +use std::{mem::swap, rc::Rc}; use super::{ inflate_helpers::adjust_parameters_trailing_whitespace, Attribute, Codegen, CodegenState, @@ -13,12 +13,8 @@ use super::{ }; use crate::{ nodes::{ - expression::*, - op::*, - traits::{ - Inflate, ParenthesizedDeflatedNode, ParenthesizedNode, Result, WithComma, - WithLeadingLines, - }, + traits::{Inflate, Result, WithComma, WithLeadingLines}, + Arg, AssignEqual, Asynchronous, AugOp, BitOr, Element, ParenthesizedNode, }, tokenizer::{ whitespace_parser::{ @@ -29,14 +25,12 @@ use crate::{ }, LeftCurlyBrace, LeftSquareBracket, RightCurlyBrace, RightSquareBracket, }; -#[cfg(feature = "py")] -use libcst_derive::TryIntoPy; -use libcst_derive::{cst_node, Codegen, Inflate, ParenthesizedDeflatedNode, ParenthesizedNode}; +use libcst_derive::{Codegen, Inflate, IntoPy, ParenthesizedNode}; -type TokenRef<'r, 'a> = &'r Token<'a>; +type TokenRef<'a> = Rc>; #[allow(clippy::large_enum_variant)] -#[cst_node(Inflate, Codegen)] +#[derive(Debug, Eq, PartialEq, Clone, Inflate, Codegen, IntoPy)] pub enum Statement<'a> { Simple(SimpleStatementLine<'a>), Compound(CompoundStatement<'a>), @@ -51,8 +45,8 @@ impl<'a> WithLeadingLines<'a> for Statement<'a> { } } +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] #[allow(clippy::large_enum_variant)] -#[cst_node(Inflate, Codegen)] pub enum CompoundStatement<'a> { FunctionDef(FunctionDef<'a>), If(If<'a>), @@ -81,13 +75,13 @@ impl<'a> WithLeadingLines<'a> for CompoundStatement<'a> { } } -#[cst_node(Inflate, Codegen)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] pub enum Suite<'a> { IndentedBlock(IndentedBlock<'a>), SimpleStatementSuite(SimpleStatementSuite<'a>), } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct IndentedBlock<'a> { /// Sequence of statements belonging to this indented block. pub body: Vec>, @@ -144,10 +138,9 @@ impl<'a> Codegen<'a> for IndentedBlock<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedIndentedBlock<'r, 'a> { - type Inflated = IndentedBlock<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let body = self.body.inflate(config)?; +impl<'a> Inflate<'a> for IndentedBlock<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.body = self.body.inflate(config)?; // We want to be able to only keep comments in the footer that are actually for // this IndentedBlock. We do so by assuming that lines which are indented to the // same level as the block itself are comments that go at the footer of the @@ -168,20 +161,17 @@ impl<'r, 'a> Inflate<'a> for DeflatedIndentedBlock<'r, 'a> { config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - let mut indent = self.indent_tok.relative_indent; - if indent == Some(config.default_indent) { - indent = None; + self.footer = footer; + self.header = header; + self.indent = self.indent_tok.relative_indent; + if self.indent == Some(config.default_indent) { + self.indent = None; } - Ok(Self::Inflated { - body, - header, - indent, - footer, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct SimpleStatementSuite<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. 
@@ -196,23 +186,18 @@ pub struct SimpleStatementSuite<'a> { pub(crate) newline_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedSimpleStatementSuite<'r, 'a> { - type Inflated = SimpleStatementSuite<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_whitespace = parse_simple_whitespace( +impl<'a> Inflate<'a> for SimpleStatementSuite<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_whitespace = parse_simple_whitespace( config, &mut (*self.first_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; - let trailing_whitespace = parse_trailing_whitespace( + self.body = self.body.inflate(config)?; + self.trailing_whitespace = parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { - body, - leading_whitespace, - trailing_whitespace, - }) + Ok(self) } } @@ -240,7 +225,7 @@ impl<'a> Codegen<'a> for SimpleStatementSuite<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct SimpleStatementLine<'a> { /// Sequence of small statements. All but the last statement are required to have /// a semicolon. @@ -265,29 +250,24 @@ impl<'a> Codegen<'a> for SimpleStatementLine<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedSimpleStatementLine<'r, 'a> { - type Inflated = SimpleStatementLine<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for SimpleStatementLine<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.first_tok).whitespace_before.borrow_mut(), None, )?; - let body = self.body.inflate(config)?; - let trailing_whitespace = parse_trailing_whitespace( + self.body = self.body.inflate(config)?; + self.trailing_whitespace = parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { - body, - leading_lines, - trailing_whitespace, - }) + Ok(self) } } #[allow(dead_code, clippy::large_enum_variant)] -#[cst_node(Codegen, Inflate)] +#[derive(Debug, Eq, PartialEq, Clone, Codegen, Inflate, IntoPy)] pub enum SmallStatement<'a> { Pass(Pass<'a>), Break(Break<'a>), @@ -304,11 +284,10 @@ pub enum SmallStatement<'a> { Nonlocal(Nonlocal<'a>), AugAssign(AugAssign<'a>), Del(Del<'a>), - TypeAlias(TypeAlias<'a>), } -impl<'r, 'a> DeflatedSmallStatement<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> SmallStatement<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { match self { Self::Pass(p) => Self::Pass(p.with_semicolon(semicolon)), Self::Break(p) => Self::Break(p.with_semicolon(semicolon)), @@ -325,17 +304,16 @@ impl<'r, 'a> DeflatedSmallStatement<'r, 'a> { Self::Nonlocal(l) => Self::Nonlocal(l.with_semicolon(semicolon)), Self::AugAssign(a) => Self::AugAssign(a.with_semicolon(semicolon)), Self::Del(d) => Self::Del(d.with_semicolon(semicolon)), - Self::TypeAlias(t) => Self::TypeAlias(t.with_semicolon(semicolon)), } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Pass<'a> { pub semicolon: Option>, } -impl<'r, 'a> DeflatedPass<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Pass<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon } } } @@ -345,20 +323,19 @@ impl<'a> Codegen<'a> for Pass<'a> { self.semicolon.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedPass<'r, 'a> { - type Inflated = Pass<'a>; - fn inflate(self, 
config: &Config<'a>) -> Result { - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { semicolon }) +impl<'a> Inflate<'a> for Pass<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Break<'a> { pub semicolon: Option>, } -impl<'r, 'a> DeflatedBreak<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Break<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon } } } @@ -368,20 +345,19 @@ impl<'a> Codegen<'a> for Break<'a> { self.semicolon.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedBreak<'r, 'a> { - type Inflated = Break<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { semicolon }) +impl<'a> Inflate<'a> for Break<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Continue<'a> { pub semicolon: Option>, } -impl<'r, 'a> DeflatedContinue<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Continue<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon } } } @@ -391,21 +367,20 @@ impl<'a> Codegen<'a> for Continue<'a> { self.semicolon.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedContinue<'r, 'a> { - type Inflated = Continue<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { semicolon }) +impl<'a> Inflate<'a> for Continue<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Expr<'a> { pub value: Expression<'a>, pub semicolon: Option>, } -impl<'r, 'a> DeflatedExpr<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Expr<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } @@ -415,16 +390,15 @@ impl<'a> Codegen<'a> for Expr<'a> { self.semicolon.codegen(state); } } -impl<'r, 'a> Inflate<'a> for DeflatedExpr<'r, 'a> { - type Inflated = Expr<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let value = self.value.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { value, semicolon }) +impl<'a> Inflate<'a> for Expr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Assign<'a> { pub targets: Vec>, pub value: Expression<'a>, @@ -443,27 +417,22 @@ impl<'a> Codegen<'a> for Assign<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAssign<'r, 'a> { - type Inflated = Assign<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let targets = self.targets.inflate(config)?; - let value = self.value.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - targets, - value, - semicolon, - }) +impl<'a> Inflate<'a> for Assign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.targets = self.targets.inflate(config)?; + self.value = self.value.inflate(config)?; + self.semicolon = 
self.semicolon.inflate(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedAssign<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Assign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AssignTarget<'a> { pub target: AssignTargetExpression<'a>, pub whitespace_before_equal: SimpleWhitespace<'a>, @@ -481,36 +450,31 @@ impl<'a> Codegen<'a> for AssignTarget<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAssignTarget<'r, 'a> { - type Inflated = AssignTarget<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let target = self.target.inflate(config)?; - let whitespace_before_equal = parse_simple_whitespace( +impl<'a> Inflate<'a> for AssignTarget<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.whitespace_before_equal = parse_simple_whitespace( config, &mut (*self.equal_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_equal = + self.whitespace_after_equal = parse_simple_whitespace(config, &mut (*self.equal_tok).whitespace_after.borrow_mut())?; - Ok(Self::Inflated { - target, - whitespace_before_equal, - whitespace_after_equal, - }) + Ok(self) } } #[allow(clippy::large_enum_variant)] -#[cst_node(Codegen, ParenthesizedNode, Inflate)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] pub enum AssignTargetExpression<'a> { - Name(Box>), - Attribute(Box>), - StarredElement(Box>), - Tuple(Box>), - List(Box>), - Subscript(Box>), + Name(Name<'a>), + Attribute(Attribute<'a>), + StarredElement(StarredElement<'a>), + Tuple(Tuple<'a>), + List(List<'a>), + Subscript(Subscript<'a>), } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Import<'a> { pub names: Vec>, pub semicolon: Option>, @@ -535,32 +499,27 @@ impl<'a> Codegen<'a> for Import<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedImport<'r, 'a> { - type Inflated = Import<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_import = parse_simple_whitespace( +impl<'a> Inflate<'a> for Import<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_after.borrow_mut(), )?; - let names = self.names.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - names, - semicolon, - whitespace_after_import, - }) + self.names = self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedImport<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Import<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ImportFrom<'a> { - #[cfg_attr(feature = "py", no_py_default)] + #[no_py_default] pub module: Option>, pub names: ImportNames<'a>, pub relative: Vec>, @@ -602,119 +561,95 @@ impl<'a> Codegen<'a> for ImportFrom<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedImportFrom<'r, 'a> { - type Inflated = ImportFrom<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_from = +impl<'a> Inflate<'a> for ImportFrom<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_from = parse_simple_whitespace(config, &mut (*self.from_tok).whitespace_after.borrow_mut())?; - let module = 
self.module.inflate(config)?; + self.module = self.module.inflate(config)?; - let whitespace_after_import = parse_simple_whitespace( + self.whitespace_after_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_after.borrow_mut(), )?; - let mut relative = inflate_dots(self.relative, config)?; - let mut whitespace_before_import = Default::default(); + self.relative = inflate_dots(self.relative, config)?; - if !relative.is_empty() && module.is_none() { + if !self.relative.is_empty() && self.module.is_none() { // For relative-only imports relocate the space after the final dot to be owned // by the import token. if let Some(Dot { whitespace_after: ParenthesizableWhitespace::SimpleWhitespace(dot_ws), .. - }) = relative.last_mut() + }) = self.relative.last_mut() { - swap(dot_ws, &mut whitespace_before_import); + swap(dot_ws, &mut self.whitespace_before_import); } } else { - whitespace_before_import = parse_simple_whitespace( + self.whitespace_before_import = parse_simple_whitespace( config, &mut (*self.import_tok).whitespace_before.borrow_mut(), )?; } - let lpar = self.lpar.inflate(config)?; - let names = self.names.inflate(config)?; - let rpar = self.rpar.inflate(config)?; + self.lpar = self.lpar.inflate(config)?; + self.names = self.names.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - module, - names, - relative, - lpar, - rpar, - semicolon, - whitespace_after_from, - whitespace_before_import, - whitespace_after_import, - }) + Ok(self) } } -fn inflate_dots<'r, 'a>( - dots: Vec>, - config: &Config<'a>, -) -> Result>> { +fn inflate_dots<'a>(dots: Vec>, config: &Config<'a>) -> Result>> { let mut ret: Vec> = vec![]; - let mut last_tok: Option> = None; - for dot in dots { + let mut last_tok: Option> = None; + for mut dot in dots { if let Some(last_tokref) = &last_tok { // Consecutive dots having the same Token can only happen if `...` was // parsed as a single ELLIPSIS token. 
In this case the token's // whitespace_before belongs to the first dot, but the whitespace_after is // moved to the 3rd dot (by swapping it twice) if last_tokref.start_pos == dot.tok.start_pos { - let mut subsequent_dot = Dot { - whitespace_before: Default::default(), - whitespace_after: Default::default(), - }; swap( &mut ret.last_mut().unwrap().whitespace_after, - &mut subsequent_dot.whitespace_after, + &mut dot.whitespace_after, ); - ret.push(subsequent_dot); + ret.push(dot); continue; } } - last_tok = Some(dot.tok); + last_tok = Some(dot.tok.clone()); ret.push(dot.inflate(config)?); } Ok(ret) } -impl<'r, 'a> DeflatedImportFrom<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> ImportFrom<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ImportAlias<'a> { pub name: NameOrAttribute<'a>, pub asname: Option>, pub comma: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedImportAlias<'r, 'a> { - type Inflated = ImportAlias<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let name = self.name.inflate(config)?; - let asname = self.asname.inflate(config)?; - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { - name, - asname, - comma, - }) +impl<'a> Inflate<'a> for ImportAlias<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.name = self.name.inflate(config)?; + self.asname = self.asname.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedImportAlias<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for ImportAlias<'a> { + fn with_comma(self, comma: Comma<'a>) -> ImportAlias<'a> { let comma = Some(comma); Self { comma, ..self } } @@ -732,7 +667,7 @@ impl<'a> Codegen<'a> for ImportAlias<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AsName<'a> { pub name: AssignTargetExpression<'a>, pub whitespace_before_as: ParenthesizableWhitespace<'a>, @@ -750,27 +685,22 @@ impl<'a> Codegen<'a> for AsName<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAsName<'r, 'a> { - type Inflated = AsName<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before_as = parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for AsName<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_as = parse_parenthesizable_whitespace( config, &mut (*self.as_tok).whitespace_before.borrow_mut(), )?; - let whitespace_after_as = parse_parenthesizable_whitespace( + self.whitespace_after_as = parse_parenthesizable_whitespace( config, &mut (*self.as_tok).whitespace_after.borrow_mut(), )?; - let name = self.name.inflate(config)?; - Ok(Self::Inflated { - name, - whitespace_before_as, - whitespace_after_as, - }) + self.name = self.name.inflate(config)?; + Ok(self) } } -#[cst_node(Inflate)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, IntoPy)] pub enum ImportNames<'a> { Star(ImportStar), Aliases(Vec>), @@ -792,10 +722,9 @@ impl<'a> Codegen<'a> for ImportNames<'a> { } } -#[cst_node] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct FunctionDef<'a> { pub name: Name<'a>, - pub type_parameters: Option>, pub params: Parameters<'a>, pub body: Suite<'a>, pub decorators: Vec>, @@ -805,7 +734,6 @@ pub struct FunctionDef<'a> { pub lines_after_decorators: Vec>, pub whitespace_after_def: SimpleWhitespace<'a>, pub whitespace_after_name: SimpleWhitespace<'a>, 
- pub whitespace_after_type_parameters: SimpleWhitespace<'a>, pub whitespace_before_params: ParenthesizableWhitespace<'a>, pub whitespace_before_colon: SimpleWhitespace<'a>, @@ -816,8 +744,8 @@ pub struct FunctionDef<'a> { pub(crate) colon_tok: TokenRef<'a>, } -impl<'r, 'a> DeflatedFunctionDef<'r, 'a> { - pub fn with_decorators(self, decorators: Vec>) -> Self { +impl<'a> FunctionDef<'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { Self { decorators, ..self } } } @@ -842,12 +770,6 @@ impl<'a> Codegen<'a> for FunctionDef<'a> { self.whitespace_after_def.codegen(state); self.name.codegen(state); self.whitespace_after_name.codegen(state); - - if let Some(tp) = &self.type_parameters { - tp.codegen(state); - self.whitespace_after_type_parameters.codegen(state); - } - state.add_token("("); self.whitespace_before_params.codegen(state); self.params.codegen(state); @@ -863,10 +785,9 @@ impl<'a> Codegen<'a> for FunctionDef<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { - type Inflated = FunctionDef<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let mut decorators = self.decorators.inflate(config)?; +impl<'a> Inflate<'a> for FunctionDef<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.decorators = self.decorators.inflate(config)?; let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -882,7 +803,8 @@ impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { (None, None) }; - let mut leading_lines = if let Some(ll) = leading_lines { + self.asynchronous = asynchronous; + let leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -892,72 +814,40 @@ impl<'r, 'a> Inflate<'a> for DeflatedFunctionDef<'r, 'a> { )? 
}; - let mut lines_after_decorators = Default::default(); - - if let Some(dec) = decorators.first_mut() { - swap(&mut lines_after_decorators, &mut leading_lines); - swap(&mut dec.leading_lines, &mut leading_lines); + self.leading_lines = leading_lines; + if let Some(dec) = self.decorators.first_mut() { + swap(&mut self.lines_after_decorators, &mut self.leading_lines); + swap(&mut dec.leading_lines, &mut self.leading_lines); } - let whitespace_after_def = + self.whitespace_after_def = parse_simple_whitespace(config, &mut (*self.def_tok).whitespace_after.borrow_mut())?; - let name = self.name.inflate(config)?; + self.name = self.name.inflate(config)?; + self.whitespace_after_name = parse_simple_whitespace( + config, + &mut (*self.open_paren_tok).whitespace_before.borrow_mut(), + )?; - let whitespace_after_name; - let mut type_parameters = Default::default(); - let mut whitespace_after_type_parameters = Default::default(); - - if let Some(tp) = self.type_parameters { - let rbracket_tok = tp.rbracket.tok.clone(); - whitespace_after_name = parse_simple_whitespace( - config, - &mut tp.lbracket.tok.whitespace_before.borrow_mut(), - )?; - type_parameters = Some(tp.inflate(config)?); - whitespace_after_type_parameters = - parse_simple_whitespace(config, &mut rbracket_tok.whitespace_after.borrow_mut())?; - } else { - whitespace_after_name = parse_simple_whitespace( - config, - &mut self.open_paren_tok.whitespace_before.borrow_mut(), - )?; - } - - let whitespace_before_params = parse_parenthesizable_whitespace( + self.whitespace_before_params = parse_parenthesizable_whitespace( config, &mut (*self.open_paren_tok).whitespace_after.borrow_mut(), )?; - let mut params = self.params.inflate(config)?; - adjust_parameters_trailing_whitespace(config, &mut params, &self.close_paren_tok)?; + self.params = self.params.inflate(config)?; + adjust_parameters_trailing_whitespace(config, &mut self.params, &self.close_paren_tok)?; - let returns = self.returns.inflate(config)?; - let whitespace_before_colon = parse_simple_whitespace( + self.returns = self.returns.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; - Ok(Self::Inflated { - name, - type_parameters, - params, - body, - decorators, - returns, - asynchronous, - leading_lines, - lines_after_decorators, - whitespace_after_def, - whitespace_after_name, - whitespace_after_type_parameters, - whitespace_before_params, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct Decorator<'a> { pub decorator: Expression<'a>, pub leading_lines: Vec>, @@ -981,31 +871,31 @@ impl<'a> Codegen<'a> for Decorator<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedDecorator<'r, 'a> { - type Inflated = Decorator<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for Decorator<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.at_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_after_at = + self.whitespace_after_at = parse_simple_whitespace(config, &mut (*self.at_tok).whitespace_after.borrow_mut())?; - let decorator = self.decorator.inflate(config)?; - let trailing_whitespace = parse_trailing_whitespace( + self.decorator = self.decorator.inflate(config)?; + self.trailing_whitespace = 
parse_trailing_whitespace( config, &mut (*self.newline_tok).whitespace_before.borrow_mut(), )?; - Ok(Self::Inflated { - decorator, - leading_lines, - whitespace_after_at, - trailing_whitespace, - }) + Ok(self) } } -#[cst_node] +impl<'a> pyo3::conversion::IntoPy for Box> { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + (*self).into_py(py) + } +} + +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct If<'a> { /// The expression that, when evaluated, should give us a truthy value pub test: Expression<'a>, @@ -1026,7 +916,7 @@ pub struct If<'a> { pub whitespace_after_test: SimpleWhitespace<'a>, /// Signifies if this instance represents an ``elif`` or an ``if`` block. - #[cfg_attr(feature = "py", skip_py)] + #[skip_py] pub is_elif: bool, pub(crate) if_tok: TokenRef<'a>, @@ -1052,44 +942,35 @@ impl<'a> Codegen<'a> for If<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedIf<'r, 'a> { - type Inflated = If<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for If<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.if_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_before_test = + self.whitespace_before_test = parse_simple_whitespace(config, &mut (*self.if_tok).whitespace_after.borrow_mut())?; - let test = self.test.inflate(config)?; - let whitespace_after_test = parse_simple_whitespace( + self.test = self.test.inflate(config)?; + self.whitespace_after_test = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; - let orelse = self.orelse.inflate(config)?; + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; - Ok(Self::Inflated { - test, - body, - orelse, - leading_lines, - whitespace_before_test, - whitespace_after_test, - is_elif: self.is_elif, - }) + Ok(self) } } #[allow(clippy::large_enum_variant)] -#[cst_node(Inflate, Codegen)] +#[derive(Debug, PartialEq, Eq, Clone, Inflate, Codegen, IntoPy)] pub enum OrElse<'a> { Elif(If<'a>), Else(Else<'a>), } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Else<'a> { pub body: Suite<'a>, /// Sequence of empty lines appearing before this compound statement line. 
@@ -1115,29 +996,24 @@ impl<'a> Codegen<'a> for Else<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedElse<'r, 'a> { - type Inflated = Else<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for Else<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.else_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_before_colon = parse_simple_whitespace( + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; + self.body = self.body.inflate(config)?; - Ok(Self::Inflated { - body, - leading_lines, - whitespace_before_colon, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Annotation<'a> { pub annotation: Expression<'a>, pub whitespace_before_indicator: Option>, @@ -1162,27 +1038,22 @@ impl<'a> Annotation<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAnnotation<'r, 'a> { - type Inflated = Annotation<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_before_indicator = Some(parse_parenthesizable_whitespace( +impl<'a> Inflate<'a> for Annotation<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_before_indicator = Some(parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_before.borrow_mut(), )?); - let whitespace_after_indicator = parse_parenthesizable_whitespace( + self.whitespace_after_indicator = parse_parenthesizable_whitespace( config, &mut (*self.tok).whitespace_after.borrow_mut(), )?; - let annotation = self.annotation.inflate(config)?; - Ok(Self::Inflated { - annotation, - whitespace_before_indicator, - whitespace_after_indicator, - }) + self.annotation = self.annotation.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AnnAssign<'a> { pub target: AssignTargetExpression<'a>, pub annotation: Annotation<'a>, @@ -1210,31 +1081,24 @@ impl<'a> Codegen<'a> for AnnAssign<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedAnnAssign<'r, 'a> { - type Inflated = AnnAssign<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let target = self.target.inflate(config)?; - let annotation = self.annotation.inflate(config)?; - let value = self.value.inflate(config)?; - let equal = self.equal.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - target, - annotation, - value, - equal, - semicolon, - }) +impl<'a> Inflate<'a> for AnnAssign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.annotation = self.annotation.inflate(config)?; + self.value = self.value.inflate(config)?; + self.equal = self.equal.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedAnnAssign<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> AnnAssign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Return<'a> { pub value: Option>, pub whitespace_after_return: Option>, @@ -1261,36 +1125,31 @@ impl<'a> Codegen<'a> for Return<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedReturn<'r, 'a> { - type Inflated = Return<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_return = if 
self.value.is_some() { - Some(parse_simple_whitespace( +impl<'a> Inflate<'a> for Return<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if self.value.is_some() { + self.whitespace_after_return = Some(parse_simple_whitespace( config, &mut (*self.return_tok).whitespace_after.borrow_mut(), - )?) + )?); } else { // otherwise space is owned by semicolon or small statement // whitespace is not None to preserve a quirk of the pure python parser - Some(Default::default()) - }; - let value = self.value.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - value, - whitespace_after_return, - semicolon, - }) + self.whitespace_after_return = Some(Default::default()) + } + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedReturn<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Return<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Assert<'a> { pub test: Expression<'a>, pub msg: Option>, @@ -1319,36 +1178,29 @@ impl<'a> Codegen<'a> for Assert<'a> { } } } -impl<'r, 'a> Inflate<'a> for DeflatedAssert<'r, 'a> { - type Inflated = Assert<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_assert = parse_simple_whitespace( +impl<'a> Inflate<'a> for Assert<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_assert = parse_simple_whitespace( config, &mut (*self.assert_tok).whitespace_after.borrow_mut(), )?; - let test = self.test.inflate(config)?; - let comma = self.comma.inflate(config)?; - let msg = self.msg.inflate(config)?; + self.test = self.test.inflate(config)?; + self.comma = self.comma.inflate(config)?; + self.msg = self.msg.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - test, - msg, - comma, - whitespace_after_assert, - semicolon, - }) + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } -impl<'r, 'a> DeflatedAssert<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Assert<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Raise<'a> { pub exc: Option>, pub cause: Option>, @@ -1358,34 +1210,26 @@ pub struct Raise<'a> { pub(crate) raise_tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedRaise<'r, 'a> { - type Inflated = Raise<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_raise = if self.exc.is_some() { - Some(parse_simple_whitespace( +impl<'a> Inflate<'a> for Raise<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + if self.exc.is_some() { + self.whitespace_after_raise = Some(parse_simple_whitespace( config, &mut (*self.raise_tok).whitespace_after.borrow_mut(), - )?) 
- } else { - Default::default() - }; + )?); + } - let exc = self.exc.inflate(config)?; - let mut cause = self.cause.inflate(config)?; - if exc.is_none() { - if let Some(cause) = cause.as_mut() { + self.exc = self.exc.inflate(config)?; + self.cause = self.cause.inflate(config)?; + if self.exc.is_none() { + if let Some(cause) = self.cause.as_mut() { // in `raise from`, `raise` owns the shared whitespace cause.whitespace_before_from = None; } } - let semicolon = self.semicolon.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - exc, - cause, - whitespace_after_raise, - semicolon, - }) + Ok(self) } } @@ -1412,24 +1256,23 @@ impl<'a> Codegen<'a> for Raise<'a> { } } -impl<'r, 'a> DeflatedRaise<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Raise<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct NameItem<'a> { pub name: Name<'a>, pub comma: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedNameItem<'r, 'a> { - type Inflated = NameItem<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let name = self.name.inflate(config)?; - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { name, comma }) +impl<'a> Inflate<'a> for NameItem<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.name = self.name.inflate(config)?; + self.comma = self.comma.inflate(config)?; + Ok(self) } } @@ -1444,7 +1287,7 @@ impl<'a> NameItem<'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Global<'a> { pub names: Vec>, pub whitespace_after_global: SimpleWhitespace<'a>, @@ -1453,18 +1296,13 @@ pub struct Global<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedGlobal<'r, 'a> { - type Inflated = Global<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_global = +impl<'a> Inflate<'a> for Global<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_global = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - let names = self.names.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - names, - whitespace_after_global, - semicolon, - }) + self.names = self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } @@ -1483,13 +1321,13 @@ impl<'a> Codegen<'a> for Global<'a> { } } -impl<'r, 'a> DeflatedGlobal<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Global<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Nonlocal<'a> { pub names: Vec>, pub whitespace_after_nonlocal: SimpleWhitespace<'a>, @@ -1498,18 +1336,13 @@ pub struct Nonlocal<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedNonlocal<'r, 'a> { - type Inflated = Nonlocal<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_nonlocal = +impl<'a> Inflate<'a> for Nonlocal<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_nonlocal = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - let names = self.names.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - names, - whitespace_after_nonlocal, - semicolon, - }) + self.names = 
self.names.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } @@ -1528,13 +1361,13 @@ impl<'a> Codegen<'a> for Nonlocal<'a> { } } -impl<'r, 'a> DeflatedNonlocal<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Nonlocal<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct For<'a> { pub target: AssignTargetExpression<'a>, pub iter: Expression<'a>, @@ -1580,9 +1413,8 @@ impl<'a> Codegen<'a> for For<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedFor<'r, 'a> { - type Inflated = For<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for For<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -1597,7 +1429,7 @@ impl<'r, 'a> Inflate<'a> for DeflatedFor<'r, 'a> { } else { (None, None) }; - let leading_lines = if let Some(ll) = leading_lines { + self.leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -1606,38 +1438,28 @@ impl<'r, 'a> Inflate<'a> for DeflatedFor<'r, 'a> { None, )? }; - let whitespace_after_for = + self.asynchronous = asynchronous; + self.whitespace_after_for = parse_simple_whitespace(config, &mut (*self.for_tok).whitespace_after.borrow_mut())?; - let target = self.target.inflate(config)?; - let whitespace_before_in = + self.target = self.target.inflate(config)?; + self.whitespace_before_in = parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_before.borrow_mut())?; - let whitespace_after_in = + self.whitespace_after_in = parse_simple_whitespace(config, &mut (*self.in_tok).whitespace_after.borrow_mut())?; - let iter = self.iter.inflate(config)?; - let whitespace_before_colon = parse_simple_whitespace( + self.iter = self.iter.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; - let orelse = self.orelse.inflate(config)?; + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; - Ok(Self::Inflated { - target, - iter, - body, - orelse, - asynchronous, - leading_lines, - whitespace_after_for, - whitespace_before_in, - whitespace_after_in, - whitespace_before_colon, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct While<'a> { pub test: Expression<'a>, pub body: Suite<'a>, @@ -1669,39 +1491,30 @@ impl<'a> Codegen<'a> for While<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedWhile<'r, 'a> { - type Inflated = While<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for While<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.while_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_after_while = + self.whitespace_after_while = parse_simple_whitespace(config, &mut (*self.while_tok).whitespace_after.borrow_mut())?; - let test = self.test.inflate(config)?; - let whitespace_before_colon = parse_simple_whitespace( + self.test = self.test.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = 
self.body.inflate(config)?; - let orelse = self.orelse.inflate(config)?; + self.body = self.body.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; - Ok(Self::Inflated { - test, - body, - orelse, - leading_lines, - whitespace_after_while, - whitespace_before_colon, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ClassDef<'a> { pub name: Name<'a>, - pub type_parameters: Option>, pub body: Suite<'a>, pub bases: Vec>, pub keywords: Vec>, @@ -1712,12 +1525,10 @@ pub struct ClassDef<'a> { pub lines_after_decorators: Vec>, pub whitespace_after_class: SimpleWhitespace<'a>, pub whitespace_after_name: SimpleWhitespace<'a>, - pub whitespace_after_type_parameters: SimpleWhitespace<'a>, pub whitespace_before_colon: SimpleWhitespace<'a>, pub(crate) class_tok: TokenRef<'a>, - pub(crate) lpar_tok: Option>, - pub(crate) rpar_tok: Option>, + pub(crate) parens_tok: Option<(TokenRef<'a>, TokenRef<'a>)>, pub(crate) colon_tok: TokenRef<'a>, } @@ -1739,11 +1550,6 @@ impl<'a> Codegen<'a> for ClassDef<'a> { self.name.codegen(state); self.whitespace_after_name.codegen(state); - if let Some(tp) = &self.type_parameters { - tp.codegen(state); - self.whitespace_after_type_parameters.codegen(state); - } - let need_parens = !self.bases.is_empty() || !self.keywords.is_empty(); if let Some(lpar) = &self.lpar { @@ -1769,79 +1575,50 @@ impl<'a> Codegen<'a> for ClassDef<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedClassDef<'r, 'a> { - type Inflated = ClassDef<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let mut leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for ClassDef<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.class_tok).whitespace_before.borrow_mut(), None, )?; - let mut decorators = self.decorators.inflate(config)?; - let mut lines_after_decorators = Default::default(); - if let Some(dec) = decorators.first_mut() { - swap(&mut lines_after_decorators, &mut leading_lines); - swap(&mut dec.leading_lines, &mut leading_lines); + self.decorators = self.decorators.inflate(config)?; + if let Some(dec) = self.decorators.first_mut() { + swap(&mut self.lines_after_decorators, &mut self.leading_lines); + swap(&mut dec.leading_lines, &mut self.leading_lines); } - let whitespace_after_class = + self.whitespace_after_class = parse_simple_whitespace(config, &mut (*self.class_tok).whitespace_after.borrow_mut())?; - let name = self.name.inflate(config)?; + self.name = self.name.inflate(config)?; - let (mut whitespace_after_name, mut type_parameters, mut whitespace_after_type_parameters) = - Default::default(); - - if let Some(tparams) = self.type_parameters { - let rbracket_tok = tparams.rbracket.tok.clone(); - whitespace_after_name = parse_simple_whitespace( - config, - &mut tparams.lbracket.tok.whitespace_before.borrow_mut(), - )?; - type_parameters = Some(tparams.inflate(config)?); - whitespace_after_type_parameters = - parse_simple_whitespace(config, &mut rbracket_tok.whitespace_after.borrow_mut())?; - } else if let Some(lpar_tok) = self.lpar_tok.as_mut() { - whitespace_after_name = + if let Some((lpar_tok, _)) = self.parens_tok.as_mut() { + self.whitespace_after_name = parse_simple_whitespace(config, &mut lpar_tok.whitespace_before.borrow_mut())?; + self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; + self.bases = self.bases.inflate(config)?; + self.keywords = self.keywords.inflate(config)?; + self.rpar = self.rpar.map(|rpar| 
rpar.inflate(config)).transpose()?; + // TODO: set whitespace_after_arg for last arg? } - let lpar = self.lpar.inflate(config)?; - let bases = self.bases.inflate(config)?; - let keywords = self.keywords.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - - let whitespace_before_colon = parse_simple_whitespace( + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; + self.body = self.body.inflate(config)?; - Ok(Self::Inflated { - name, - type_parameters, - body, - bases, - keywords, - decorators, - lpar, - rpar, - leading_lines, - lines_after_decorators, - whitespace_after_class, - whitespace_after_type_parameters, - whitespace_after_name, - whitespace_before_colon, - }) + Ok(self) } } -impl<'r, 'a> DeflatedClassDef<'r, 'a> { - pub fn with_decorators(self, decorators: Vec>) -> Self { +impl<'a> ClassDef<'a> { + pub fn with_decorators(self, decorators: Vec>) -> Self { Self { decorators, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Finally<'a> { pub body: Suite<'a>, pub leading_lines: Vec>, @@ -1865,28 +1642,23 @@ impl<'a> Codegen<'a> for Finally<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedFinally<'r, 'a> { - type Inflated = Finally<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for Finally<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.finally_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_before_colon = parse_simple_whitespace( + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; - Ok(Self::Inflated { - body, - leading_lines, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ExceptHandler<'a> { pub body: Suite<'a>, pub r#type: Option>, @@ -1920,43 +1692,33 @@ impl<'a> Codegen<'a> for ExceptHandler<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedExceptHandler<'r, 'a> { - type Inflated = ExceptHandler<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for ExceptHandler<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.except_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_after_except = parse_simple_whitespace( + self.whitespace_after_except = parse_simple_whitespace( config, &mut (*self.except_tok).whitespace_after.borrow_mut(), )?; - let r#type = self.r#type.inflate(config)?; - let name = self.name.inflate(config)?; - let whitespace_before_colon = if name.is_some() { - parse_simple_whitespace( + self.r#type = self.r#type.inflate(config)?; + self.name = self.name.inflate(config)?; + if self.name.is_some() { + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), - )? 
- } else { - Default::default() - }; + )?; + } - let body = self.body.inflate(config)?; - Ok(Self::Inflated { - body, - r#type, - name, - leading_lines, - whitespace_after_except, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct ExceptStarHandler<'a> { pub body: Suite<'a>, pub r#type: Expression<'a>, @@ -1992,41 +1754,33 @@ impl<'a> Codegen<'a> for ExceptStarHandler<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedExceptStarHandler<'r, 'a> { - type Inflated = ExceptStarHandler<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for ExceptStarHandler<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut self.except_tok.whitespace_before.borrow_mut(), None, )?; - let whitespace_after_except = + self.whitespace_after_except = parse_simple_whitespace(config, &mut self.except_tok.whitespace_after.borrow_mut())?; - let whitespace_after_star = + self.whitespace_after_star = parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; - let r#type = self.r#type.inflate(config)?; - let name = self.name.inflate(config)?; - let whitespace_before_colon = if name.is_some() { - parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())? - } else { - Default::default() - }; + self.r#type = self.r#type.inflate(config)?; + self.name = self.name.inflate(config)?; + if self.name.is_some() { + self.whitespace_before_colon = parse_simple_whitespace( + config, + &mut self.colon_tok.whitespace_before.borrow_mut(), + )?; + } - let body = self.body.inflate(config)?; - Ok(Self::Inflated { - body, - r#type, - name, - leading_lines, - whitespace_after_except, - whitespace_after_star, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Try<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -2061,32 +1815,24 @@ impl<'a> Codegen<'a> for Try<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedTry<'r, 'a> { - type Inflated = Try<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for Try<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.try_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_before_colon = + self.whitespace_before_colon = parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; - let body = self.body.inflate(config)?; - let handlers = self.handlers.inflate(config)?; - let orelse = self.orelse.inflate(config)?; - let finalbody = self.finalbody.inflate(config)?; - Ok(Self::Inflated { - body, - handlers, - orelse, - finalbody, - leading_lines, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + self.handlers = self.handlers.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + self.finalbody = self.finalbody.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct TryStar<'a> { pub body: Suite<'a>, pub handlers: Vec>, @@ -2121,32 +1867,24 @@ impl<'a> Codegen<'a> for TryStar<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedTryStar<'r, 'a> { - type Inflated = TryStar<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( 
+impl<'a> Inflate<'a> for TryStar<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut (*self.try_tok).whitespace_before.borrow_mut(), None, )?; - let whitespace_before_colon = + self.whitespace_before_colon = parse_simple_whitespace(config, &mut (*self.try_tok).whitespace_after.borrow_mut())?; - let body = self.body.inflate(config)?; - let handlers = self.handlers.inflate(config)?; - let orelse = self.orelse.inflate(config)?; - let finalbody = self.finalbody.inflate(config)?; - Ok(Self::Inflated { - body, - handlers, - orelse, - finalbody, - leading_lines, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + self.handlers = self.handlers.inflate(config)?; + self.orelse = self.orelse.inflate(config)?; + self.finalbody = self.finalbody.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct AugAssign<'a> { pub target: AssignTargetExpression<'a>, pub operator: AugOp<'a>, @@ -2154,19 +1892,13 @@ pub struct AugAssign<'a> { pub semicolon: Option>, } -impl<'r, 'a> Inflate<'a> for DeflatedAugAssign<'r, 'a> { - type Inflated = AugAssign<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let target = self.target.inflate(config)?; - let operator = self.operator.inflate(config)?; - let value = self.value.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - target, - operator, - value, - semicolon, - }) +impl<'a> Inflate<'a> for AugAssign<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.target = self.target.inflate(config)?; + self.operator = self.operator.inflate(config)?; + self.value = self.value.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } @@ -2182,33 +1914,29 @@ impl<'a> Codegen<'a> for AugAssign<'a> { } } -impl<'r, 'a> DeflatedAugAssign<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> AugAssign<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct WithItem<'a> { pub item: Expression<'a>, pub asname: Option>, pub comma: Option>, } -impl<'r, 'a> DeflatedWithItem<'r, 'a> { - fn inflate_withitem(self, config: &Config<'a>, is_last: bool) -> Result> { - let item = self.item.inflate(config)?; - let asname = self.asname.inflate(config)?; - let comma = if is_last { +impl<'a> WithItem<'a> { + fn inflate_withitem(mut self, config: &Config<'a>, is_last: bool) -> Result { + self.item = self.item.inflate(config)?; + self.asname = self.asname.inflate(config)?; + self.comma = if is_last { self.comma.map(|c| c.inflate_before(config)).transpose()? } else { self.comma.map(|c| c.inflate(config)).transpose()? 
}; - Ok(WithItem { - item, - asname, - comma, - }) + Ok(self) } } @@ -2224,8 +1952,8 @@ impl<'a> Codegen<'a> for WithItem<'a> { } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedWithItem<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for WithItem<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { Self { comma: Some(comma), ..self @@ -2233,7 +1961,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedWithItem<'r, 'a> { } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct With<'a> { pub items: Vec>, pub body: Suite<'a>, @@ -2293,9 +2021,8 @@ impl<'a> Codegen<'a> for With<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedWith<'r, 'a> { - type Inflated = With<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { +impl<'a> Inflate<'a> for With<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { let (asynchronous, leading_lines) = if let Some(asy) = self.async_tok.as_mut() { let whitespace_after = parse_parenthesizable_whitespace(config, &mut asy.whitespace_after.borrow_mut())?; @@ -2311,7 +2038,9 @@ impl<'r, 'a> Inflate<'a> for DeflatedWith<'r, 'a> { (None, None) }; - let leading_lines = if let Some(ll) = leading_lines { + self.asynchronous = asynchronous; + + self.leading_lines = if let Some(ll) = leading_lines { ll } else { parse_empty_lines( @@ -2321,73 +2050,61 @@ impl<'r, 'a> Inflate<'a> for DeflatedWith<'r, 'a> { )? }; - let whitespace_after_with = + self.whitespace_after_with = parse_simple_whitespace(config, &mut (*self.with_tok).whitespace_after.borrow_mut())?; - let lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; + self.lpar = self.lpar.map(|lpar| lpar.inflate(config)).transpose()?; let len = self.items.len(); - let items = self + self.items = self .items .into_iter() .enumerate() .map(|(idx, el)| el.inflate_withitem(config, idx + 1 == len)) .collect::>>()?; - let rpar = if !items.is_empty() { + if !self.items.is_empty() { // rpar only has whitespace if items is non empty - self.rpar.map(|rpar| rpar.inflate(config)).transpose()? 
- } else { - Default::default() - }; - let whitespace_before_colon = parse_simple_whitespace( + self.rpar = self.rpar.map(|rpar| rpar.inflate(config)).transpose()?; + } + self.whitespace_before_colon = parse_simple_whitespace( config, &mut (*self.colon_tok).whitespace_before.borrow_mut(), )?; - let body = self.body.inflate(config)?; + self.body = self.body.inflate(config)?; - Ok(Self::Inflated { - items, - body, - asynchronous, - leading_lines, - lpar, - rpar, - whitespace_after_with, - whitespace_before_colon, - }) + Ok(self) } } -#[cst_node(Codegen, ParenthesizedNode, Inflate)] +#[allow(clippy::large_enum_variant)] +#[derive(Debug, PartialEq, Eq, Clone, Codegen, ParenthesizedNode, Inflate, IntoPy)] pub enum DelTargetExpression<'a> { - Name(Box>), - Attribute(Box>), - Tuple(Box>), - List(Box>), - Subscript(Box>), + Name(Name<'a>), + Attribute(Attribute<'a>), + Tuple(Tuple<'a>), + List(List<'a>), + Subscript(Subscript<'a>), } -impl<'r, 'a> std::convert::From> - for DeflatedExpression<'r, 'a> -{ - fn from(d: DeflatedDelTargetExpression<'r, 'a>) -> Self { +impl<'a> std::convert::From> for Expression<'a> { + fn from(d: DelTargetExpression<'a>) -> Self { match d { - DeflatedDelTargetExpression::Attribute(a) => Self::Attribute(a), - DeflatedDelTargetExpression::List(l) => Self::List(l), - DeflatedDelTargetExpression::Name(n) => Self::Name(n), - DeflatedDelTargetExpression::Subscript(s) => Self::Subscript(s), - DeflatedDelTargetExpression::Tuple(t) => Self::Tuple(t), + DelTargetExpression::Attribute(a) => Expression::Attribute(a), + DelTargetExpression::List(l) => Expression::List(l), + DelTargetExpression::Name(n) => Expression::Name(n), + DelTargetExpression::Subscript(s) => Expression::Subscript(s), + DelTargetExpression::Tuple(t) => Expression::Tuple(t), } } } -impl<'r, 'a> std::convert::From> for DeflatedElement<'r, 'a> { - fn from(d: DeflatedDelTargetExpression<'r, 'a>) -> Self { - Self::Simple { +impl<'a> std::convert::From> for Element<'a> { + fn from(d: DelTargetExpression<'a>) -> Element { + Element::Simple { value: d.into(), comma: None, } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Del<'a> { pub target: DelTargetExpression<'a>, pub whitespace_after_del: SimpleWhitespace<'a>, @@ -2396,18 +2113,13 @@ pub struct Del<'a> { pub(crate) tok: TokenRef<'a>, } -impl<'r, 'a> Inflate<'a> for DeflatedDel<'r, 'a> { - type Inflated = Del<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_del = +impl<'a> Inflate<'a> for Del<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.whitespace_after_del = parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?; - let target = self.target.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - target, - whitespace_after_del, - semicolon, - }) + self.target = self.target.inflate(config)?; + self.semicolon = self.semicolon.inflate(config)?; + Ok(self) } } @@ -2422,13 +2134,13 @@ impl<'a> Codegen<'a> for Del<'a> { } } -impl<'r, 'a> DeflatedDel<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { +impl<'a> Del<'a> { + pub fn with_semicolon(self, semicolon: Option>) -> Self { Self { semicolon, ..self } } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct Match<'a> { pub subject: Expression<'a>, pub cases: Vec>, @@ -2474,46 +2186,36 @@ impl<'a> Codegen<'a> for Match<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatch<'r, 'a> { - type Inflated = Match<'a>; - fn inflate(self, config: 
&Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for Match<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut self.match_tok.whitespace_before.borrow_mut(), None, )?; - let whitespace_after_match = + self.whitespace_after_match = parse_simple_whitespace(config, &mut self.match_tok.whitespace_after.borrow_mut())?; - let subject = self.subject.inflate(config)?; - let whitespace_before_colon = + self.subject = self.subject.inflate(config)?; + self.whitespace_before_colon = parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; - let whitespace_after_colon = + self.whitespace_after_colon = parse_trailing_whitespace(config, &mut self.colon_tok.whitespace_after.borrow_mut())?; - let mut indent = self.indent_tok.relative_indent; - if indent == Some(config.default_indent) { - indent = None; + self.indent = self.indent_tok.relative_indent; + if self.indent == Some(config.default_indent) { + self.indent = None; } - let cases = self.cases.inflate(config)?; + self.cases = self.cases.inflate(config)?; // See note about footers in `IndentedBlock`'s inflate fn - let footer = parse_empty_lines( + self.footer = parse_empty_lines( config, &mut self.dedent_tok.whitespace_after.borrow_mut(), Some(self.indent_tok.whitespace_before.borrow().absolute_indent), )?; - Ok(Self::Inflated { - subject, - cases, - leading_lines, - whitespace_after_match, - whitespace_before_colon, - whitespace_after_colon, - indent, - footer, - }) + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchCase<'a> { pub pattern: MatchPattern<'a>, pub guard: Option>, @@ -2551,45 +2253,33 @@ impl<'a> Codegen<'a> for MatchCase<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchCase<'r, 'a> { - type Inflated = MatchCase<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let leading_lines = parse_empty_lines( +impl<'a> Inflate<'a> for MatchCase<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.leading_lines = parse_empty_lines( config, &mut self.case_tok.whitespace_before.borrow_mut(), None, )?; - let whitespace_after_case = + self.whitespace_after_case = parse_simple_whitespace(config, &mut self.case_tok.whitespace_after.borrow_mut())?; - let pattern = self.pattern.inflate(config)?; - let (whitespace_before_if, whitespace_after_if, guard) = - if let Some(if_tok) = self.if_tok.as_mut() { - ( - parse_simple_whitespace(config, &mut if_tok.whitespace_before.borrow_mut())?, - parse_simple_whitespace(config, &mut if_tok.whitespace_after.borrow_mut())?, - self.guard.inflate(config)?, - ) - } else { - Default::default() - }; - let whitespace_before_colon = + self.pattern = self.pattern.inflate(config)?; + if let Some(if_tok) = self.if_tok.as_mut() { + self.whitespace_before_if = + parse_simple_whitespace(config, &mut if_tok.whitespace_before.borrow_mut())?; + self.whitespace_after_if = + parse_simple_whitespace(config, &mut if_tok.whitespace_after.borrow_mut())?; + + self.guard = self.guard.inflate(config)?; + } + self.whitespace_before_colon = parse_simple_whitespace(config, &mut self.colon_tok.whitespace_before.borrow_mut())?; - let body = self.body.inflate(config)?; - Ok(Self::Inflated { - pattern, - guard, - body, - leading_lines, - whitespace_after_case, - whitespace_before_if, - whitespace_after_if, - whitespace_before_colon, - }) + self.body = self.body.inflate(config)?; + Ok(self) } } #[allow(clippy::large_enum_variant)] -#[cst_node(Codegen, 
Inflate, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] pub enum MatchPattern<'a> { Value(MatchValue<'a>), Singleton(MatchSingleton<'a>), @@ -2600,7 +2290,7 @@ pub enum MatchPattern<'a> { Or(Box>), } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchValue<'a> { pub value: Expression<'a>, } @@ -2631,33 +2321,14 @@ impl<'a> Codegen<'a> for MatchValue<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchValue<'r, 'a> { - type Inflated = MatchValue<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let value = self.value.inflate(config)?; - Ok(Self::Inflated { value }) +impl<'a> Inflate<'a> for MatchValue<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + Ok(self) } } -impl<'r, 'a> ParenthesizedDeflatedNode<'r, 'a> for DeflatedMatchValue<'r, 'a> { - fn lpar(&self) -> &Vec> { - self.value.lpar() - } - fn rpar(&self) -> &Vec> { - self.value.rpar() - } - fn with_parens( - self, - left: DeflatedLeftParen<'r, 'a>, - right: DeflatedRightParen<'r, 'a>, - ) -> Self { - Self { - value: self.value.with_parens(left, right), - } - } -} - -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchSingleton<'a> { pub value: Name<'a>, } @@ -2688,40 +2359,21 @@ impl<'a> Codegen<'a> for MatchSingleton<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchSingleton<'r, 'a> { - type Inflated = MatchSingleton<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let value = self.value.inflate(config)?; - Ok(Self::Inflated { value }) - } -} - -impl<'r, 'a> ParenthesizedDeflatedNode<'r, 'a> for DeflatedMatchSingleton<'r, 'a> { - fn lpar(&self) -> &Vec> { - self.value.lpar() - } - fn rpar(&self) -> &Vec> { - self.value.rpar() - } - fn with_parens( - self, - left: DeflatedLeftParen<'r, 'a>, - right: DeflatedRightParen<'r, 'a>, - ) -> Self { - Self { - value: self.value.with_parens(left, right), - } +impl<'a> Inflate<'a> for MatchSingleton<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.value = self.value.inflate(config)?; + Ok(self) } } #[allow(clippy::large_enum_variant)] -#[cst_node(Codegen, Inflate, ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, Codegen, Inflate, ParenthesizedNode)] pub enum MatchSequence<'a> { MatchList(MatchList<'a>), MatchTuple(MatchTuple<'a>), } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchList<'a> { pub patterns: Vec>, pub lbracket: Option>, @@ -2747,33 +2399,26 @@ impl<'a> Codegen<'a> for MatchList<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchList<'r, 'a> { - type Inflated = MatchList<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbracket = self.lbracket.inflate(config)?; +impl<'a> Inflate<'a> for MatchList<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbracket = self.lbracket.inflate(config)?; let len = self.patterns.len(); - let patterns = self + self.patterns = self .patterns .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - let rbracket = self.rbracket.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - patterns, - lbracket, - rbracket, - lpar, - rpar, - }) + self.rbracket = self.rbracket.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] 
+#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchTuple<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -2795,28 +2440,23 @@ impl<'a> Codegen<'a> for MatchTuple<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchTuple<'r, 'a> { - type Inflated = MatchTuple<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; +impl<'a> Inflate<'a> for MatchTuple<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; let len = self.patterns.len(); - let patterns = self + self.patterns = self .patterns .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, idx + 1 == len)) .collect::>>()?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - patterns, - lpar, - rpar, - }) + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } #[allow(clippy::large_enum_variant)] -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub enum StarrableMatchSequenceElement<'a> { Simple(MatchSequenceElement<'a>), Starred(MatchStar<'a>), @@ -2834,26 +2474,16 @@ impl<'a> StarrableMatchSequenceElement<'a> { Self::Starred(s) => s.codegen(state, default_comma, default_comma_whitespace), } } -} -impl<'r, 'a> DeflatedStarrableMatchSequenceElement<'r, 'a> { - fn inflate_element( - self, - config: &Config<'a>, - last_element: bool, - ) -> Result> { + fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result { Ok(match self { - Self::Simple(s) => { - StarrableMatchSequenceElement::Simple(s.inflate_element(config, last_element)?) - } - Self::Starred(s) => { - StarrableMatchSequenceElement::Starred(s.inflate_element(config, last_element)?) - } + Self::Simple(s) => Self::Simple(s.inflate_element(config, last_element)?), + Self::Starred(s) => Self::Starred(s.inflate_element(config, last_element)?), }) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedStarrableMatchSequenceElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for StarrableMatchSequenceElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { match self { Self::Simple(s) => Self::Simple(s.with_comma(comma)), Self::Starred(s) => Self::Starred(s.with_comma(comma)), @@ -2861,7 +2491,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedStarrableMatchSequenceElement<'r, 'a> } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchSequenceElement<'a> { pub value: MatchPattern<'a>, pub comma: Option>, @@ -2880,25 +2510,20 @@ impl<'a> MatchSequenceElement<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } -} -impl<'r, 'a> DeflatedMatchSequenceElement<'r, 'a> { - fn inflate_element( - self, - config: &Config<'a>, - last_element: bool, - ) -> Result> { - let value = self.value.inflate(config)?; - let comma = if last_element { + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.value = self.value.inflate(config)?; + self.comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(MatchSequenceElement { value, comma }) + Ok(self) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchSequenceElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for MatchSequenceElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { Self { comma: Some(comma), ..self @@ -2906,7 +2531,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchSequenceElement<'r, 'a> { } } 
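The `inflate_element`/`with_comma` pairs above all follow the same builder shape: an element is consumed and rebuilt with struct-update syntax so that only its trailing comma changes, and only the last element of a sequence inflates the whitespace before its comma rather than the full comma. A small self-contained sketch of the builder half, with simplified stand-in types:

#[derive(Debug, Default, Clone)]
struct Comma;

#[derive(Debug, Default, Clone)]
struct Element {
    value: String,
    comma: Option<Comma>,
}

trait WithComma {
    fn with_comma(self, comma: Comma) -> Self;
}

impl WithComma for Element {
    fn with_comma(self, comma: Comma) -> Self {
        // `..self` keeps every other field untouched.
        Self {
            comma: Some(comma),
            ..self
        }
    }
}

fn main() {
    let el = Element { value: "x".into(), comma: None };
    let el = el.with_comma(Comma);
    assert!(el.comma.is_some());
    println!("{:?}", el);
}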
-#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchStar<'a> { pub name: Option>, pub comma: Option>, @@ -2934,29 +2559,24 @@ impl<'a> MatchStar<'a> { state.add_token(if default_comma_whitespace { ", " } else { "," }); } } -} -impl<'r, 'a> DeflatedMatchStar<'r, 'a> { - fn inflate_element(self, config: &Config<'a>, last_element: bool) -> Result> { - let whitespace_before_name = parse_parenthesizable_whitespace( + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.whitespace_before_name = parse_parenthesizable_whitespace( config, &mut self.star_tok.whitespace_after.borrow_mut(), )?; - let name = self.name.inflate(config)?; - let comma = if last_element { + self.name = self.name.inflate(config)?; + self.comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(MatchStar { - name, - comma, - whitespace_before_name, - }) + Ok(self) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchStar<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for MatchStar<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { Self { comma: Some(comma), ..self @@ -2964,7 +2584,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchStar<'r, 'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchMapping<'a> { pub elements: Vec>, pub rest: Option>, @@ -2998,50 +2618,37 @@ impl<'a> Codegen<'a> for MatchMapping<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchMapping<'r, 'a> { - type Inflated = MatchMapping<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let lbrace = self.lbrace.inflate(config)?; +impl<'a> Inflate<'a> for MatchMapping<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.lbrace = self.lbrace.inflate(config)?; let len = self.elements.len(); let no_star = self.star_tok.is_none(); - let elements = self + self.elements = self .elements .into_iter() .enumerate() .map(|(idx, el)| el.inflate_element(config, no_star && idx + 1 == len)) .collect::>>()?; - let (whitespace_before_rest, rest, trailing_comma) = - if let Some(star_tok) = self.star_tok.as_mut() { - ( - parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())?, - self.rest.inflate(config)?, - self.trailing_comma - .map(|c| c.inflate_before(config)) - .transpose()?, - ) - } else { - Default::default() - }; + if let Some(star_tok) = self.star_tok.as_mut() { + self.whitespace_before_rest = + parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())?; + self.rest = self.rest.inflate(config)?; + self.trailing_comma = self + .trailing_comma + .map(|c| c.inflate_before(config)) + .transpose()?; + } - let rbrace = self.rbrace.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - elements, - rest, - trailing_comma, - lbrace, - rbrace, - lpar, - rpar, - whitespace_before_rest, - }) + self.rbrace = self.rbrace.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchMappingElement<'a> { pub key: Expression<'a>, pub pattern: MatchPattern<'a>, @@ -3065,40 +2672,29 @@ impl<'a> MatchMappingElement<'a> { state.add_token(", "); } } -} -impl<'r, 'a> DeflatedMatchMappingElement<'r, 'a> { - fn inflate_element( - self, - config: &Config<'a>, - 
last_element: bool, - ) -> Result> { - let key = self.key.inflate(config)?; - let whitespace_before_colon = parse_parenthesizable_whitespace( + + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.key = self.key.inflate(config)?; + self.whitespace_before_colon = parse_parenthesizable_whitespace( config, &mut self.colon_tok.whitespace_before.borrow_mut(), )?; - let whitespace_after_colon = parse_parenthesizable_whitespace( + self.whitespace_after_colon = parse_parenthesizable_whitespace( config, &mut self.colon_tok.whitespace_after.borrow_mut(), )?; - let pattern = self.pattern.inflate(config)?; - let comma = if last_element { + self.pattern = self.pattern.inflate(config)?; + self.comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(MatchMappingElement { - key, - pattern, - comma, - whitespace_before_colon, - whitespace_after_colon, - }) + Ok(self) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchMappingElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for MatchMappingElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { Self { comma: Some(comma), ..self @@ -3106,7 +2702,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchMappingElement<'r, 'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchClass<'a> { pub cls: NameOrAttribute<'a>, pub patterns: Vec>, @@ -3143,56 +2739,46 @@ impl<'a> Codegen<'a> for MatchClass<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchClass<'r, 'a> { - type Inflated = MatchClass<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; +impl<'a> Inflate<'a> for MatchClass<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; - let cls = self.cls.inflate(config)?; - let whitespace_after_cls = parse_parenthesizable_whitespace( + self.cls = self.cls.inflate(config)?; + self.whitespace_after_cls = parse_parenthesizable_whitespace( config, &mut self.lpar_tok.whitespace_before.borrow_mut(), )?; - let whitespace_before_patterns = parse_parenthesizable_whitespace( + self.whitespace_before_patterns = parse_parenthesizable_whitespace( config, &mut self.lpar_tok.whitespace_after.borrow_mut(), )?; let patlen = self.patterns.len(); let kwdlen = self.kwds.len(); - let patterns = self + self.patterns = self .patterns .into_iter() .enumerate() .map(|(idx, pat)| pat.inflate_element(config, idx + 1 == patlen + kwdlen)) .collect::>()?; - let kwds = self + self.kwds = self .kwds .into_iter() .enumerate() .map(|(idx, kwd)| kwd.inflate_element(config, idx + 1 == kwdlen)) .collect::>()?; - let whitespace_after_kwds = parse_parenthesizable_whitespace( + self.whitespace_after_kwds = parse_parenthesizable_whitespace( config, &mut self.rpar_tok.whitespace_before.borrow_mut(), )?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - cls, - patterns, - kwds, - lpar, - rpar, - whitespace_after_cls, - whitespace_before_patterns, - whitespace_after_kwds, - }) + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchKeywordElement<'a> { pub key: Name<'a>, pub pattern: MatchPattern<'a>, @@ -3216,40 +2802,28 @@ impl<'a> MatchKeywordElement<'a> { state.add_token(", "); } } -} -impl<'r, 'a> DeflatedMatchKeywordElement<'r, 'a> { - fn inflate_element( - self, - config: 
&Config<'a>, - last_element: bool, - ) -> Result> { - let key = self.key.inflate(config)?; - let whitespace_before_equal = parse_parenthesizable_whitespace( + fn inflate_element(mut self, config: &Config<'a>, last_element: bool) -> Result { + self.key = self.key.inflate(config)?; + self.whitespace_before_equal = parse_parenthesizable_whitespace( config, &mut self.equal_tok.whitespace_before.borrow_mut(), )?; - let whitespace_after_equal = parse_parenthesizable_whitespace( + self.whitespace_after_equal = parse_parenthesizable_whitespace( config, &mut self.equal_tok.whitespace_after.borrow_mut(), )?; - let pattern = self.pattern.inflate(config)?; - let comma = if last_element { + self.pattern = self.pattern.inflate(config)?; + self.comma = if last_element { self.comma.map(|c| c.inflate_before(config)).transpose() } else { self.comma.inflate(config) }?; - Ok(MatchKeywordElement { - key, - pattern, - comma, - whitespace_before_equal, - whitespace_after_equal, - }) + Ok(self) } } -impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchKeywordElement<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { +impl<'a> WithComma<'a> for MatchKeywordElement<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self { Self { comma: Some(comma), ..self @@ -3257,7 +2831,7 @@ impl<'r, 'a> WithComma<'r, 'a> for DeflatedMatchKeywordElement<'r, 'a> { } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchAs<'a> { pub pattern: Option>, pub name: Option>, @@ -3288,40 +2862,27 @@ impl<'a> Codegen<'a> for MatchAs<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchAs<'r, 'a> { - type Inflated = MatchAs<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let pattern = self.pattern.inflate(config)?; - let (whitespace_before_as, whitespace_after_as) = if let Some(as_tok) = self.as_tok.as_mut() - { - ( - Some(parse_parenthesizable_whitespace( - config, - &mut as_tok.whitespace_before.borrow_mut(), - )?), - Some(parse_parenthesizable_whitespace( - config, - &mut as_tok.whitespace_after.borrow_mut(), - )?), - ) - } else { - Default::default() - }; - let name = self.name.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - pattern, - name, - lpar, - rpar, - whitespace_before_as, - whitespace_after_as, - }) +impl<'a> Inflate<'a> for MatchAs<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.pattern = self.pattern.inflate(config)?; + if let Some(as_tok) = self.as_tok.as_mut() { + self.whitespace_before_as = Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_before.borrow_mut(), + )?); + self.whitespace_after_as = Some(parse_parenthesizable_whitespace( + config, + &mut as_tok.whitespace_after.borrow_mut(), + )?); + } + self.name = self.name.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } -#[cst_node] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy)] pub struct MatchOrElement<'a> { pub pattern: MatchPattern<'a>, pub separator: Option>, @@ -3337,16 +2898,15 @@ impl<'a> MatchOrElement<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchOrElement<'r, 'a> { - type Inflated = MatchOrElement<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let pattern = self.pattern.inflate(config)?; - let separator = self.separator.inflate(config)?; - Ok(Self::Inflated { pattern, separator }) +impl<'a> Inflate<'a> for MatchOrElement<'a> { + fn inflate(mut self, config: 
&Config<'a>) -> Result { + self.pattern = self.pattern.inflate(config)?; + self.separator = self.separator.inflate(config)?; + Ok(self) } } -#[cst_node(ParenthesizedNode)] +#[derive(Debug, PartialEq, Eq, Clone, IntoPy, ParenthesizedNode)] pub struct MatchOr<'a> { pub patterns: Vec>, pub lpar: Vec>, @@ -3364,281 +2924,11 @@ impl<'a> Codegen<'a> for MatchOr<'a> { } } -impl<'r, 'a> Inflate<'a> for DeflatedMatchOr<'r, 'a> { - type Inflated = MatchOr<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lpar = self.lpar.inflate(config)?; - let patterns = self.patterns.inflate(config)?; - let rpar = self.rpar.inflate(config)?; - Ok(Self::Inflated { - patterns, - lpar, - rpar, - }) - } -} - -#[cst_node] -pub struct TypeVar<'a> { - pub name: Name<'a>, - pub bound: Option>>, - pub colon: Option>, -} - -impl<'a> Codegen<'a> for TypeVar<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - self.name.codegen(state); - self.colon.codegen(state); - if let Some(bound) = &self.bound { - bound.codegen(state); - } - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedTypeVar<'r, 'a> { - type Inflated = TypeVar<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let name = self.name.inflate(config)?; - let colon = self.colon.inflate(config)?; - let bound = self.bound.inflate(config)?; - Ok(Self::Inflated { name, bound, colon }) - } -} - -#[cst_node] -pub struct TypeVarTuple<'a> { - pub name: Name<'a>, - - pub whitespace_after_star: SimpleWhitespace<'a>, - - pub(crate) star_tok: TokenRef<'a>, -} - -impl<'a> Codegen<'a> for TypeVarTuple<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - state.add_token("*"); - self.whitespace_after_star.codegen(state); - self.name.codegen(state); - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedTypeVarTuple<'r, 'a> { - type Inflated = TypeVarTuple<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_star = - parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; - let name = self.name.inflate(config)?; - Ok(Self::Inflated { - name, - whitespace_after_star, - }) - } -} - -#[cst_node] -pub struct ParamSpec<'a> { - pub name: Name<'a>, - - pub whitespace_after_star: SimpleWhitespace<'a>, - - pub(crate) star_tok: TokenRef<'a>, -} - -impl<'a> Codegen<'a> for ParamSpec<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - state.add_token("**"); - self.whitespace_after_star.codegen(state); - self.name.codegen(state); - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedParamSpec<'r, 'a> { - type Inflated = ParamSpec<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_star = - parse_simple_whitespace(config, &mut self.star_tok.whitespace_after.borrow_mut())?; - let name = self.name.inflate(config)?; - Ok(Self::Inflated { - name, - whitespace_after_star, - }) - } -} - -#[cst_node(Inflate, Codegen)] -pub enum TypeVarLike<'a> { - TypeVar(TypeVar<'a>), - TypeVarTuple(TypeVarTuple<'a>), - ParamSpec(ParamSpec<'a>), -} - -#[cst_node] -pub struct TypeParam<'a> { - pub param: TypeVarLike<'a>, - pub comma: Option>, - pub equal: Option>, - pub star: &'a str, - pub whitespace_after_star: SimpleWhitespace<'a>, - pub default: Option>, - pub star_tok: Option>, -} - -impl<'a> Codegen<'a> for TypeParam<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - self.param.codegen(state); - self.equal.codegen(state); - state.add_token(self.star); - self.whitespace_after_star.codegen(state); - self.default.codegen(state); - self.comma.codegen(state); - } -} - -impl<'r, 'a> Inflate<'a> for 
DeflatedTypeParam<'r, 'a> { - type Inflated = TypeParam<'a>; - fn inflate(mut self, config: &Config<'a>) -> Result { - let whitespace_after_star = if let Some(star_tok) = self.star_tok.as_mut() { - parse_simple_whitespace(config, &mut star_tok.whitespace_after.borrow_mut())? - } else { - Default::default() - }; - let param = self.param.inflate(config)?; - let equal = self.equal.inflate(config)?; - let default = self.default.inflate(config)?; - let comma = self.comma.inflate(config)?; - Ok(Self::Inflated { - param, - comma, - equal, - star: self.star, - whitespace_after_star, - default, - }) - } -} - -impl<'r, 'a> WithComma<'r, 'a> for DeflatedTypeParam<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self { - Self { - comma: Some(comma), - ..self - } - } -} - -#[cst_node] -pub struct TypeParameters<'a> { - pub params: Vec>, - - pub lbracket: LeftSquareBracket<'a>, - pub rbracket: RightSquareBracket<'a>, -} - -impl<'a> Codegen<'a> for TypeParameters<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - self.lbracket.codegen(state); - let params_len = self.params.len(); - for (idx, param) in self.params.iter().enumerate() { - param.codegen(state); - if idx + 1 < params_len && param.comma.is_none() { - state.add_token(", "); - } - } - self.rbracket.codegen(state); - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedTypeParameters<'r, 'a> { - type Inflated = TypeParameters<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let lbracket = self.lbracket.inflate(config)?; - let params = self.params.inflate(config)?; - let rbracket = self.rbracket.inflate(config)?; - Ok(Self::Inflated { - params, - lbracket, - rbracket, - }) - } -} - -#[cst_node] -pub struct TypeAlias<'a> { - pub name: Name<'a>, - pub value: Box>, - pub type_parameters: Option>, - - pub whitespace_after_type: SimpleWhitespace<'a>, - pub whitespace_after_name: Option>, - pub whitespace_after_type_parameters: Option>, - pub whitespace_after_equals: SimpleWhitespace<'a>, - pub semicolon: Option>, - - pub(crate) type_tok: TokenRef<'a>, - pub(crate) lbracket_tok: Option>, - pub(crate) equals_tok: TokenRef<'a>, -} - -impl<'a> Codegen<'a> for TypeAlias<'a> { - fn codegen(&self, state: &mut CodegenState<'a>) { - state.add_token("type"); - self.whitespace_after_type.codegen(state); - self.name.codegen(state); - if self.whitespace_after_name.is_none() && self.type_parameters.is_none() { - state.add_token(" "); - } else { - self.whitespace_after_name.codegen(state); - } - if self.type_parameters.is_some() { - self.type_parameters.codegen(state); - self.whitespace_after_type_parameters.codegen(state); - } - state.add_token("="); - self.whitespace_after_equals.codegen(state); - self.value.codegen(state); - self.semicolon.codegen(state); - } -} - -impl<'r, 'a> Inflate<'a> for DeflatedTypeAlias<'r, 'a> { - type Inflated = TypeAlias<'a>; - fn inflate(self, config: &Config<'a>) -> Result { - let whitespace_after_type = - parse_simple_whitespace(config, &mut self.type_tok.whitespace_after.borrow_mut())?; - let name = self.name.inflate(config)?; - let whitespace_after_name = Some(if let Some(tok) = self.lbracket_tok { - parse_simple_whitespace(config, &mut tok.whitespace_before.borrow_mut()) - } else { - parse_simple_whitespace(config, &mut self.equals_tok.whitespace_before.borrow_mut()) - }?); - let type_parameters = self.type_parameters.inflate(config)?; - let whitespace_after_type_parameters = if type_parameters.is_some() { - Some(parse_simple_whitespace( - config, - &mut self.equals_tok.whitespace_before.borrow_mut(), - 
)?) - } else { - None - }; - let whitespace_after_equals = - parse_simple_whitespace(config, &mut self.equals_tok.whitespace_after.borrow_mut())?; - let value = self.value.inflate(config)?; - let semicolon = self.semicolon.inflate(config)?; - Ok(Self::Inflated { - name, - value, - type_parameters, - whitespace_after_type, - whitespace_after_name, - whitespace_after_type_parameters, - whitespace_after_equals, - semicolon, - }) - } -} - -impl<'r, 'a> DeflatedTypeAlias<'r, 'a> { - pub fn with_semicolon(self, semicolon: Option>) -> Self { - Self { semicolon, ..self } +impl<'a> Inflate<'a> for MatchOr<'a> { + fn inflate(mut self, config: &Config<'a>) -> Result { + self.lpar = self.lpar.inflate(config)?; + self.patterns = self.patterns.inflate(config)?; + self.rpar = self.rpar.inflate(config)?; + Ok(self) } } diff --git a/native/libcst/src/nodes/test_utils.rs b/native/libcst/src/nodes/test_utils.rs index 675b493d..109d471d 100644 --- a/native/libcst/src/nodes/test_utils.rs +++ b/native/libcst/src/nodes/test_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/nodes/traits.rs b/native/libcst/src/nodes/traits.rs index c15a60e1..b6ab115c 100644 --- a/native/libcst/src/nodes/traits.rs +++ b/native/libcst/src/nodes/traits.rs @@ -1,18 +1,16 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree use crate::{ - nodes::expression::{DeflatedLeftParen, DeflatedRightParen}, - nodes::op::DeflatedComma, tokenizer::whitespace_parser::{Config, WhitespaceError}, - Codegen, CodegenState, EmptyLine, LeftParen, RightParen, + Codegen, CodegenState, Comma, EmptyLine, LeftParen, RightParen, }; use std::ops::Deref; -pub trait WithComma<'r, 'a> { - fn with_comma(self, comma: DeflatedComma<'r, 'a>) -> Self; +pub trait WithComma<'a> { + fn with_comma(self, comma: Comma<'a>) -> Self; } pub trait ParenthesizedNode<'a> { @@ -53,32 +51,6 @@ impl<'a, T: ParenthesizedNode<'a>> ParenthesizedNode<'a> for Box { } } -pub trait ParenthesizedDeflatedNode<'r, 'a> { - fn lpar(&self) -> &Vec>; - fn rpar(&self) -> &Vec>; - - fn with_parens( - self, - left: DeflatedLeftParen<'r, 'a>, - right: DeflatedRightParen<'r, 'a>, - ) -> Self; -} -impl<'r, 'a, T: ParenthesizedDeflatedNode<'r, 'a>> ParenthesizedDeflatedNode<'r, 'a> for Box { - fn lpar(&self) -> &Vec> { - self.deref().lpar() - } - fn rpar(&self) -> &Vec> { - self.deref().rpar() - } - fn with_parens( - self, - left: DeflatedLeftParen<'r, 'a>, - right: DeflatedRightParen<'r, 'a>, - ) -> Self { - Self::new((*self).with_parens(left, right)) - } -} - pub trait WithLeadingLines<'a> { fn leading_lines(&mut self) -> &mut Vec>; } @@ -89,20 +61,17 @@ pub trait Inflate<'a> where Self: Sized, { - type Inflated; - fn inflate(self, config: &Config<'a>) -> Result; + fn inflate(self, config: &Config<'a>) -> Result; } impl<'a, T: Inflate<'a>> Inflate<'a> for Option { - type Inflated = Option; - fn inflate(self, config: &Config<'a>) -> Result { + fn inflate(self, config: &Config<'a>) -> Result { self.map(|x| x.inflate(config)).transpose() } } impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { - type Inflated = Box; - fn inflate(self, config: &Config<'a>) -> Result { + 
fn inflate(self, config: &Config<'a>) -> Result { match (*self).inflate(config) { Ok(a) => Ok(Box::new(a)), Err(e) => Err(e), @@ -111,72 +80,7 @@ impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box { } impl<'a, T: Inflate<'a>> Inflate<'a> for Vec { - type Inflated = Vec; - fn inflate(self, config: &Config<'a>) -> Result { + fn inflate(self, config: &Config<'a>) -> Result { self.into_iter().map(|item| item.inflate(config)).collect() } } -#[cfg(feature = "py")] -pub mod py { - use pyo3::{types::PyTuple, IntoPyObjectExt, Py, PyAny, PyResult, Python}; - - // TODO: replace with upstream implementation once - // https://github.com/PyO3/pyo3/issues/1813 is resolved - pub trait TryIntoPy: Sized { - fn try_into_py(self, py: Python) -> PyResult; - } - - // I wish: - // impl> TryIntoPy for T { - // fn try_into_py(self, py: Python) -> PyResult { - // Ok(self.into_py(py)) - // } - // } - - impl TryIntoPy> for bool { - fn try_into_py(self, py: Python) -> PyResult> { - self.into_py_any(py) - } - } - - impl>> TryIntoPy> for Box - where - T: TryIntoPy>, - { - fn try_into_py(self, py: Python) -> PyResult> { - (*self).try_into_py(py) - } - } - - impl TryIntoPy> for Option - where - T: TryIntoPy>, - { - fn try_into_py(self, py: Python) -> PyResult> { - Ok(match self { - None => py.None(), - Some(x) => x.try_into_py(py)?, - }) - } - } - - impl TryIntoPy> for Vec - where - T: TryIntoPy>, - { - fn try_into_py(self, py: Python) -> PyResult> { - let converted = self - .into_iter() - .map(|x| x.try_into_py(py)) - .collect::>>()? - .into_iter(); - PyTuple::new(py, converted)?.into_py_any(py) - } - } - - impl<'a> TryIntoPy> for &'a str { - fn try_into_py(self, py: Python) -> PyResult> { - self.into_py_any(py) - } - } -} diff --git a/native/libcst/src/nodes/whitespace.rs b/native/libcst/src/nodes/whitespace.rs index 474ee384..2c99a048 100644 --- a/native/libcst/src/nodes/whitespace.rs +++ b/native/libcst/src/nodes/whitespace.rs @@ -1,15 +1,13 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
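The `traits.rs` hunk above keeps blanket impls so that `Option<T>`, boxed nodes and `Vec<T>` fields inflate with the same call as leaf nodes. A standalone sketch of the `Option` and `Vec` impls over a toy trait (not the crate's real `Inflate`/`Config`):

struct Config;
type Result<T> = std::result::Result<T, String>;

trait Inflate: Sized {
    fn inflate(self, config: &Config) -> Result<Self>;
}

impl<T: Inflate> Inflate for Option<T> {
    fn inflate(self, config: &Config) -> Result<Self> {
        // Option<Result<T>> -> Result<Option<T>>
        self.map(|x| x.inflate(config)).transpose()
    }
}

impl<T: Inflate> Inflate for Vec<T> {
    fn inflate(self, config: &Config) -> Result<Self> {
        // Collecting into Result short-circuits on the first error.
        self.into_iter().map(|x| x.inflate(config)).collect()
    }
}

struct Leaf(u32);

impl Inflate for Leaf {
    fn inflate(mut self, _config: &Config) -> Result<Self> {
        self.0 += 1; // stand-in for attaching parsed whitespace
        Ok(self)
    }
}

fn main() -> Result<()> {
    let inflated = vec![Leaf(1), Leaf(2)].inflate(&Config)?;
    assert_eq!(inflated.len(), 2);
    let maybe = Some(Leaf(3)).inflate(&Config)?;
    assert!(maybe.is_some());
    Ok(())
}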
-#[cfg(feature = "py")] -use libcst_derive::TryIntoPy; +use libcst_derive::IntoPy; use super::{Codegen, CodegenState}; -#[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] pub struct SimpleWhitespace<'a>(pub &'a str); impl<'a> Codegen<'a> for SimpleWhitespace<'a> { @@ -18,8 +16,7 @@ impl<'a> Codegen<'a> for SimpleWhitespace<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub struct Comment<'a>(pub &'a str); impl<'a> Default for Comment<'a> { @@ -34,8 +31,7 @@ impl<'a> Codegen<'a> for Comment<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] pub struct Newline<'a>(pub Option<&'a str>, pub Fakeness); #[derive(Debug, PartialEq, Eq, Clone)] @@ -63,8 +59,7 @@ impl<'a> Codegen<'a> for Newline<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] pub struct TrailingWhitespace<'a> { pub whitespace: SimpleWhitespace<'a>, pub comment: Option>, @@ -81,8 +76,7 @@ impl<'a> Codegen<'a> for TrailingWhitespace<'a> { } } -#[derive(Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Clone, PartialEq, Eq, IntoPy)] pub struct EmptyLine<'a> { pub indent: bool, pub whitespace: SimpleWhitespace<'a>, @@ -130,8 +124,7 @@ impl<'a> EmptyLine<'a> { } } -#[derive(Debug, Eq, PartialEq, Default, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Default, Clone, IntoPy)] pub struct ParenthesizedWhitespace<'a> { pub first_line: TrailingWhitespace<'a>, pub empty_lines: Vec>, @@ -152,8 +145,7 @@ impl<'a> Codegen<'a> for ParenthesizedWhitespace<'a> { } } -#[derive(Debug, Eq, PartialEq, Clone)] -#[cfg_attr(feature = "py", derive(TryIntoPy))] +#[derive(Debug, Eq, PartialEq, Clone, IntoPy)] pub enum ParenthesizableWhitespace<'a> { SimpleWhitespace(SimpleWhitespace<'a>), ParenthesizedWhitespace(ParenthesizedWhitespace<'a>), diff --git a/native/libcst/src/parser/errors.rs b/native/libcst/src/parser/errors.rs index 7fb3b740..44573248 100644 --- a/native/libcst/src/parser/errors.rs +++ b/native/libcst/src/parser/errors.rs @@ -1,8 +1,11 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree +use pyo3::types::{IntoPyDict, PyModule}; +use pyo3::{IntoPy, PyErr, PyErrArguments, Python}; + use crate::parser::grammar::TokVec; use crate::tokenizer::whitespace_parser::WhitespaceError; use crate::tokenizer::TokError; @@ -25,48 +28,56 @@ pub enum ParserError<'a> { OperatorError, } -#[cfg(feature = "py")] -mod py_error { - - use pyo3::types::{IntoPyDict, PyAny, PyAnyMethods, PyModule}; - use pyo3::{Bound, IntoPyObject, PyErr, PyResult, Python}; - - use super::ParserError; - - impl<'a> From> for PyErr { - fn from(e: ParserError) -> Self { - Python::attach(|py| { - let lines = match &e { - ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { - text.lines().collect::>() - } - _ => vec![""], - }; - let (mut line, mut col) = match &e { - ParserError::ParserError(err, ..) 
=> { - (err.location.start_pos.line, err.location.start_pos.column) - } - _ => (0, 0), - }; - if line + 1 > lines.len() { - line = lines.len() - 1; - col = 0; +impl<'a> From> for PyErr { + fn from(e: ParserError) -> Self { + Python::with_gil(|py| { + let lines = match &e { + ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => { + text.lines().collect::>() } - match || -> PyResult> { - let kwargs = [ - ("message", e.to_string().into_pyobject(py)?.into_any()), - ("lines", lines.into_pyobject(py)?.into_any()), - ("raw_line", (line + 1).into_pyobject(py)?.into_any()), - ("raw_column", col.into_pyobject(py)?.into_any()), - ] - .into_py_dict(py)?; - let libcst = PyModule::import(py, "libcst")?; - libcst.getattr("ParserSyntaxError")?.call((), Some(&kwargs)) - }() { - Ok(py_err_value) => PyErr::from_value(py_err_value), - Err(e) => e, + _ => vec![""], + }; + let (line, col) = match &e { + ParserError::ParserError(err, ..) => { + (err.location.start_pos.line, err.location.start_pos.column) } - }) - } + _ => (0, 0), + }; + let kwargs = [ + ("message", e.to_string().into_py(py)), + ("lines", lines.into_py(py)), + ("raw_line", line.into_py(py)), + ("raw_column", col.into_py(py)), + ] + .into_py_dict(py); + let libcst = PyModule::import(py, "libcst").expect("libcst cannot be imported"); + PyErr::from_instance( + libcst + .getattr("ParserSyntaxError") + .expect("ParserSyntaxError not found") + .call((), Some(kwargs)) + .expect("failed to instantiate"), + ) + }) + } +} + +struct Details { + message: String, + lines: Vec, + raw_line: u32, + raw_column: u32, +} + +impl<'a> PyErrArguments for Details { + fn arguments(self, py: pyo3::Python) -> pyo3::PyObject { + [ + ("message", self.message.into_py(py)), + ("lines", self.lines.into_py(py)), + ("raw_line", self.raw_line.into_py(py)), + ("raw_column", self.raw_column.into_py(py)), + ] + .into_py_dict(py) + .into_py(py) } } diff --git a/native/libcst/src/parser/grammar.rs b/native/libcst/src/parser/grammar.rs index 86823961..70d2f968 100644 --- a/native/libcst/src/parser/grammar.rs +++ b/native/libcst/src/parser/grammar.rs @@ -1,25 +1,18 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
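One detail worth noting in the `errors.rs` hunk above: the `-` side clamps the reported position before building `ParserSyntaxError`, so an error line past the end of the captured source falls back to the last line at column 0. That guard in isolation (plain Rust, no pyo3):

fn clamp_position(lines: &[&str], mut line: usize, mut col: usize) -> (usize, usize) {
    // `lines` is never empty in the caller above (it falls back to vec![""]),
    // but saturating_sub keeps this toy version safe regardless.
    if line + 1 > lines.len() {
        line = lines.len().saturating_sub(1);
        col = 0;
    }
    (line, col)
}

fn main() {
    let lines = ["x = 1", "y ="];
    assert_eq!(clamp_position(&lines, 5, 3), (1, 0));
    assert_eq!(clamp_position(&lines, 0, 4), (0, 4));
}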
use std::rc::Rc; -use crate::expression::make_async; -use crate::nodes::deflated::*; -use crate::nodes::expression::make_fstringtext; -use crate::nodes::expression::make_tstringtext; -use crate::nodes::op::make_importstar; -use crate::nodes::traits::ParenthesizedDeflatedNode; +use crate::nodes::*; use crate::parser::ParserError; use crate::tokenizer::{TokType, Token}; -use crate::WithComma; use peg::str::LineCol; use peg::{parser, Parse, ParseElem, RuleResult}; use TokType::{ Async, Await as AWAIT, Dedent, EndMarker, FStringEnd, FStringStart, FStringString, Indent, - Name as NameTok, Newline as NL, Number, String as STRING, TStringEnd, TStringStart, - TStringString, + Name as NameTok, Newline as NL, Number, String as STRING, }; pub type Result<'a, T> = std::result::Result>; @@ -74,52 +67,50 @@ impl<'a> Parse for TokVec<'a> { } } -type TokenRef<'input, 'a> = &'input Token<'a>; +type TokenRef<'a> = Rc>; -impl<'input, 'a: 'input> ParseElem<'input> for TokVec<'a> { - type Element = TokenRef<'input, 'a>; +impl<'a> ParseElem for TokVec<'a> { + type Element = TokenRef<'a>; - fn parse_elem(&'input self, pos: usize) -> RuleResult { + fn parse_elem(&self, pos: usize) -> RuleResult { match self.0.get(pos) { - Some(tok) => RuleResult::Matched(pos + 1, tok), + Some(tok) => RuleResult::Matched(pos + 1, tok.clone()), None => RuleResult::Failed, } } } -const MAX_RECURSION_DEPTH: usize = 3000; - parser! { pub grammar python<'a>(input: &'a str) for TokVec<'a> { // Starting Rules - pub rule file(encoding: Option<&str>) -> Module<'input, 'a> + pub rule file(encoding: Option<&str>) -> Module<'a> = traced(<_file(encoding.unwrap_or("utf-8"))>) - pub rule expression_input() -> Expression<'input, 'a> + pub rule expression_input() -> Expression<'a> = traced() - pub rule statement_input() -> Statement<'input, 'a> + pub rule statement_input() -> Statement<'a> = traced() - rule _file(encoding: &str) -> Module<'input, 'a> + rule _file(encoding: &str) -> Module<'a> = s:statements()? eof:tok(EndMarker, "EOF") { make_module(s.unwrap_or_default(), eof, encoding) } // General statements - rule statements() -> Vec> + rule statements() -> Vec> = statement()+ - rule statement() -> Statement<'input, 'a> + rule statement() -> Statement<'a> = c:compound_stmt() { Statement::Compound(c) } / s:simple_stmts() { Statement::Simple(make_simple_statement_line(s)) } - rule simple_stmts() -> SimpleStatementParts<'input, 'a> + rule simple_stmts() -> SimpleStatementParts<'a> = first_tok:&_ stmts:separated_trailer(, ) nl:tok(NL, "NEWLINE") { SimpleStatementParts { first_tok, @@ -131,9 +122,8 @@ parser! { } #[cache] - rule simple_stmt() -> SmallStatement<'input, 'a> + rule simple_stmt() -> SmallStatement<'a> = assignment() - / &lit("type") s: type_stmt() {SmallStatement::TypeAlias(s)} / e:star_expressions() { SmallStatement::Expr(Expr { value: e, semicolon: None }) } / &lit("return") s:return_stmt() { SmallStatement::Return(s) } // this is expanded from the original grammar's import_stmt rule @@ -150,7 +140,7 @@ parser! { / &lit("nonlocal") s:nonlocal_stmt() {SmallStatement::Nonlocal(s)} - rule compound_stmt() -> CompoundStatement<'input, 'a> + rule compound_stmt() -> CompoundStatement<'a> = &(lit("def") / lit("@") / tok(Async, "ASYNC")) f:function_def() { CompoundStatement::FunctionDef(f) } @@ -165,11 +155,11 @@ parser! { // Simple statements - rule assignment() -> SmallStatement<'input, 'a> + rule assignment() -> SmallStatement<'a> = a:name() col:lit(":") ann:expression() rhs:(eq:lit("=") d:annotated_rhs() {(eq, d)})? 
{ - SmallStatement::AnnAssign(make_ann_assignment( - AssignTargetExpression::Name(Box::new(a)), col, ann, rhs)) + SmallStatement::AnnAssign( + make_ann_assignment(AssignTargetExpression::Name(a), col, ann, rhs)) } // TODO: there's an extra '(' single_target ')' clause here in upstream / a:single_subscript_attribute_target() col:lit(":") ann:expression() @@ -183,10 +173,10 @@ parser! { SmallStatement::AugAssign(make_aug_assign(t, op, rhs)) } - rule annotated_rhs() -> Expression<'input, 'a> + rule annotated_rhs() -> Expression<'a> = yield_expr() / star_expressions() - rule augassign() -> AugOp<'input, 'a> + rule augassign() -> AugOp<'a> = &(lit("+=") / lit("-=") / lit("*=") @@ -203,12 +193,12 @@ parser! { make_aug_op(tok).map_err(|_| "aug_op") } - rule return_stmt() -> Return<'input, 'a> + rule return_stmt() -> Return<'a> = kw:lit("return") a:star_expressions()? { make_return(kw, a) } - rule raise_stmt() -> Raise<'input, 'a> + rule raise_stmt() -> Raise<'a> = kw:lit("raise") exc:expression() rest:(f:lit("from") cau:expression() {(f, cau)})? { make_raise(kw, Some(exc), rest) @@ -217,17 +207,17 @@ parser! { make_raise(kw, None, None) } - rule global_stmt() -> Global<'input, 'a> + rule global_stmt() -> Global<'a> = kw:lit("global") init:(n:name() c:comma() {(n, c)})* last:name() { make_global(kw, init, last) } - rule nonlocal_stmt() -> Nonlocal<'input, 'a> + rule nonlocal_stmt() -> Nonlocal<'a> = kw:lit("nonlocal") init:(n:name() c:comma() {(n, c)})* last:name() { make_nonlocal(kw, init, last) } - rule del_stmt() -> Del<'input, 'a> + rule del_stmt() -> Del<'a> = kw:lit("del") t:del_target() &(lit(";") / tok(NL, "NEWLINE")) { make_del(kw, t) } @@ -235,22 +225,22 @@ parser! { make_del(kw, make_del_tuple(None, t, None)) } - rule yield_stmt() -> Expression<'input, 'a> + rule yield_stmt() -> Expression<'a> = yield_expr() - rule assert_stmt() -> Assert<'input, 'a> + rule assert_stmt() -> Assert<'a> = kw:lit("assert") test:expression() rest:(c:comma() msg:expression() {(c, msg)})? { make_assert(kw, test, rest) } // Import statements - rule import_name() -> Import<'input, 'a> + rule import_name() -> Import<'a> = kw:lit("import") a:dotted_as_names() { make_import(kw, a) } - rule import_from() -> ImportFrom<'input, 'a> + rule import_from() -> ImportFrom<'a> = from:lit("from") dots:dots()? m:dotted_name() import:lit("import") als:import_from_targets() { make_import_from(from, dots.unwrap_or_default(), Some(m), import, als) @@ -260,7 +250,7 @@ parser! { make_import_from(from, dots, None, import, als) } - rule import_from_targets() -> ParenthesizedImportNames<'input, 'a> + rule import_from_targets() -> ParenthesizedImportNames<'a> = lpar:lpar() als:import_from_as_names() c:comma()? rpar:rpar() { let mut als = als; if let (comma@Some(_), Some(mut last)) = (c, als.last_mut()) { @@ -269,31 +259,31 @@ parser! { (Some(lpar), ImportNames::Aliases(als), Some(rpar)) } / als:import_from_as_names() !lit(",") { (None, ImportNames::Aliases(als), None)} - / star:lit("*") { (None, ImportNames::Star(make_importstar()), None) } + / star:lit("*") { (None, ImportNames::Star(ImportStar {}), None) } - rule import_from_as_names() -> Vec> + rule import_from_as_names() -> Vec> = items:separated(, ) { make_import_from_as_names(items.0, items.1) } - rule import_from_as_name() -> ImportAlias<'input, 'a> + rule import_from_as_name() -> ImportAlias<'a> = n:name() asname:(kw:lit("as") z:name() {(kw, z)})? 
{ - make_import_alias(NameOrAttribute::N(Box::new(n)), asname) + make_import_alias(NameOrAttribute::N(n), asname) } - rule dotted_as_names() -> Vec> + rule dotted_as_names() -> Vec> = init:(d:dotted_as_name() c:comma() {d.with_comma(c)})* last:dotted_as_name() { concat(init, vec![last]) } - rule dotted_as_name() -> ImportAlias<'input, 'a> + rule dotted_as_name() -> ImportAlias<'a> = n:dotted_name() asname:(kw:lit("as") z:name() {(kw, z)})? { make_import_alias(n, asname) } // TODO: why does this diverge from CPython? - rule dotted_name() -> NameOrAttribute<'input, 'a> + rule dotted_name() -> NameOrAttribute<'a> = first:name() tail:(dot:lit(".") n:name() {(dot, n)})* { make_name_or_attr(first, tail) } @@ -303,7 +293,7 @@ parser! { // Common elements #[cache] - rule block() -> Suite<'input, 'a> + rule block() -> Suite<'a> = n:tok(NL, "NEWLINE") ind:tok(Indent, "INDENT") s:statements() ded:tok(Dedent, "DEDENT") { make_indented_block(n, ind, s, ded) } @@ -311,50 +301,50 @@ parser! { make_simple_statement_suite(s) } - rule decorators() -> Vec> + rule decorators() -> Vec> = (at:lit("@") e:named_expression() nl:tok(NL, "NEWLINE") { make_decorator(at, e, nl) } )+ // Class definitions - rule class_def() -> ClassDef<'input, 'a> + rule class_def() -> ClassDef<'a> = d:decorators() c:class_def_raw() { c.with_decorators(d) } / class_def_raw() - rule class_def_raw() -> ClassDef<'input, 'a> - = kw:lit("class") n:name() t:type_params()? arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? + rule class_def_raw() -> ClassDef<'a> + = kw:lit("class") n:name() arg:(l:lpar() a:arguments()? r:rpar() {(l, a, r)})? col:lit(":") b:block() {? - make_class_def(kw, n, t, arg, col, b) + make_class_def(kw, n, arg, col, b) } // Function definitions - rule function_def() -> FunctionDef<'input, 'a> + rule function_def() -> FunctionDef<'a> = d:decorators() f:function_def_raw() {f.with_decorators(d)} / function_def_raw() - rule _returns() -> Annotation<'input, 'a> + rule _returns() -> Annotation<'a> = l:lit("->") e:expression() { make_annotation(l, e) } - rule function_def_raw() -> FunctionDef<'input, 'a> - = def:lit("def") n:name() t:type_params()? op:lit("(") params:params()? + rule function_def_raw() -> FunctionDef<'a> + = def:lit("def") n:name() op:lit("(") params:params()? cp:lit(")") ty:_returns()? c:lit(":") b:block() { - make_function_def(None, def, n, t, op, params, cp, ty, c, b) + make_function_def(None, def, n, op, params, cp, ty, c, b) } - / asy:tok(Async, "ASYNC") def:lit("def") n:name() t:type_params()? op:lit("(") params:params()? + / asy:tok(Async, "ASYNC") def:lit("def") n:name() op:lit("(") params:params()? cp:lit(")") ty:_returns()? c:lit(":") b:block() { - make_function_def(Some(asy), def, n, t, op, params, cp, ty, c, b) + make_function_def(Some(asy), def, n, op, params, cp, ty, c, b) } // Function parameters - rule params() -> Parameters<'input, 'a> + rule params() -> Parameters<'a> = parameters() - rule parameters() -> Parameters<'input, 'a> + rule parameters() -> Parameters<'a> = a:slash_no_default() b:param_no_default()* c:param_with_default()* d:star_etc()? { make_parameters(Some(a), concat(b, c), d) } @@ -371,50 +361,42 @@ parser! 
{ make_parameters(None, vec![], Some(d)) } - rule slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:param_no_default()+ tok:lit("/") com:comma() { - (a, ParamSlash { comma: Some(com), tok }) + rule slash_no_default() -> (Vec>, ParamSlash<'a>) + = a:param_no_default()+ slash:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com)}) } - / a:param_no_default()+ tok:lit("/") &lit(")") { - (a, ParamSlash { comma: None, tok }) + / a:param_no_default()+ slash:lit("/") &lit(")") { + (a, ParamSlash { comma: None }) } - rule slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:param_no_default()* b:param_with_default()+ tok:lit("/") c:comma() { - (concat(a, b), ParamSlash { comma: Some(c), tok }) + rule slash_with_default() -> (Vec>, ParamSlash<'a>) + = a:param_no_default()* b:param_with_default()+ slash:lit("/") c:comma() { + (concat(a, b), ParamSlash { comma: Some(c) }) } - / a:param_no_default()* b:param_with_default()+ tok:lit("/") &lit(")") { - (concat(a, b), ParamSlash { comma: None, tok }) + / a:param_no_default()* b:param_with_default()+ slash:lit("/") &lit(")") { + (concat(a, b), ParamSlash { comma: None }) } - rule star_etc() -> StarEtc<'input, 'a> + rule star_etc() -> StarEtc<'a> = star:lit("*") a:param_no_default() b:param_maybe_default()* kw:kwds()? { StarEtc(Some(StarArg::Param(Box::new( add_param_star(a, star)))), b, kw) } - / star:lit("*") a:param_no_default_star_annotation() b:param_maybe_default()* kw:kwds()? { - StarEtc(Some(StarArg::Param(Box::new( - add_param_star(a, star)))), b, kw) - } / lit("*") c:comma() b:param_maybe_default()+ kw:kwds()? { - StarEtc(Some(StarArg::Star(Box::new(ParamStar {comma:c }))), b, kw) + StarEtc(Some(StarArg::Star(ParamStar {comma:c })), b, kw) } / kw:kwds() { StarEtc(None, vec![], Some(kw)) } - rule kwds() -> Param<'input, 'a> + rule kwds() -> Param<'a> = star:lit("**") a:param_no_default() { add_param_star(a, star) } - rule param_no_default() -> Param<'input, 'a> + rule param_no_default() -> Param<'a> = a:param() c:lit(",") { add_param_default(a, None, Some(c)) } / a:param() &lit(")") {a} - rule param_no_default_star_annotation() -> Param<'input, 'a> - = a:param_star_annotation() c:lit(",") { add_param_default(a, None, Some(c))} - / a:param_star_annotation() &lit(")") {a} - - rule param_with_default() -> Param<'input, 'a> + rule param_with_default() -> Param<'a> = a:param() def:default() c:lit(",") { add_param_default(a, Some(def), Some(c)) } @@ -422,7 +404,7 @@ parser! { add_param_default(a, Some(def), None) } - rule param_maybe_default() -> Param<'input, 'a> + rule param_maybe_default() -> Param<'a> = a:param() def:default()? c:lit(",") { add_param_default(a, def, Some(c)) } @@ -430,43 +412,24 @@ parser! { add_param_default(a, def, None) } - rule param() -> Param<'input, 'a> + rule param() -> Param<'a> = n:name() a:annotation()? 
{ Param {name: n, annotation: a, ..Default::default() } } - rule param_star_annotation() -> Param<'input, 'a> - = n:name() a:star_annotation() { - Param {name: n, annotation: Some(a), ..Default::default() } - } - - rule annotation() -> Annotation<'input, 'a> + rule annotation() -> Annotation<'a> = col:lit(":") e:expression() { make_annotation(col, e) } - rule star_annotation() -> Annotation<'input, 'a> - = col:lit(":") e:star_expression() { - make_annotation(col, e) - } - - rule default() -> (AssignEqual<'input, 'a>, Expression<'input, 'a>) + rule default() -> (AssignEqual<'a>, Expression<'a>) = eq:lit("=") ex:expression() { (make_assign_equal(eq), ex) } - rule default_or_starred() -> (AssignEqual<'input, 'a>,Option>, Expression<'input, 'a>) - = eq:lit("=") ex:expression() { - (make_assign_equal(eq), None , ex) - } - / eq:lit("=") star:lit("*") ex:expression() { - // make_star_default(eq, star, ex) - (make_assign_equal(eq), Some(star) , ex) - } - // If statement - rule if_stmt() -> If<'input, 'a> + rule if_stmt() -> If<'a> = i:lit("if") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { make_if(i, a, col, b, Some(OrElse::Elif(elif)), false) } @@ -474,7 +437,7 @@ parser! { make_if(i, a, col, b, el.map(OrElse::Else), false) } - rule elif_stmt() -> If<'input, 'a> + rule elif_stmt() -> If<'a> = i:lit("elif") a:named_expression() col:lit(":") b:block() elif:elif_stmt() { make_if(i, a, col, b, Some(OrElse::Elif(elif)), true) } @@ -482,21 +445,21 @@ parser! { make_if(i, a, col, b, el.map(OrElse::Else), true) } - rule else_block() -> Else<'input, 'a> + rule else_block() -> Else<'a> = el:lit("else") col:lit(":") b:block() { make_else(el, col, b) } // While statement - rule while_stmt() -> While<'input, 'a> + rule while_stmt() -> While<'a> = kw:lit("while") test:named_expression() col:lit(":") b:block() el:else_block()? { make_while(kw, test, col, b, el) } // For statement - rule for_stmt() -> For<'input, 'a> + rule for_stmt() -> For<'a> = f:lit("for") t:star_targets() i:lit("in") it:star_expressions() c:lit(":") b:block() el:else_block()? { make_for(None, f, t, i, it, c, b, el) @@ -509,7 +472,7 @@ parser! { // With statement - rule with_stmt() -> With<'input, 'a> + rule with_stmt() -> With<'a> = kw:lit("with") l:lpar() items:separated_trailer(, ) r:rpar() col:lit(":") b:block() { make_with(None, kw, Some(l), comma_separate(items.0, items.1, items.2), Some(r), col, b) @@ -527,8 +490,8 @@ parser! { make_with(Some(asy), kw, None, comma_separate(items.0, items.1, None), None, col, b) } - rule with_item() -> WithItem<'input, 'a> - = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":") / rpar()) { + rule with_item() -> WithItem<'a> + = e:expression() a:lit("as") t:star_target() &(lit(",") / lit(":")) { make_with_item(e, Some(a), Some(t)) } / e:expression() { @@ -537,7 +500,7 @@ parser! { // Try statement - rule try_stmt() -> Try<'input, 'a> + rule try_stmt() -> Try<'a> = kw:lit("try") lit(":") b:block() f:finally_block() { make_try(kw, b, vec![], None, Some(f)) } @@ -547,49 +510,30 @@ parser! { } // Note: this is separate because TryStar is a different type in LibCST - rule try_star_stmt() -> TryStar<'input, 'a> + rule try_star_stmt() -> TryStar<'a> = kw:lit("try") lit(":") b:block() ex:except_star_block()+ el:else_block()? f:finally_block()? { make_try_star(kw, b, ex, el, f) } // Except statement - rule except_block() -> ExceptHandler<'input, 'a> + + rule except_block() -> ExceptHandler<'a> = kw:lit("except") e:expression() a:(k:lit("as") n:name() {(k, n)})? 
col:lit(":") b:block() { make_except(kw, Some(e), a, col, b) } - / kw:lit("except") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())? - col:lit(":") b:block() { - let tuple = Expression::Tuple(Box::new(Tuple { - elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc), - lpar: vec![], - rpar: vec![], - })); - - make_except(kw, Some(tuple), None, col, b) - } / kw:lit("except") col:lit(":") b:block() { make_except(kw, None, None, col, b) } - rule except_star_block() -> ExceptStarHandler<'input, 'a> + rule except_star_block() -> ExceptStarHandler<'a> = kw:lit("except") star:lit("*") e:expression() a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() { make_except_star(kw, star, e, a, col, b) } - / kw:lit("except") star:lit("*") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())? - col:lit(":") b:block() { - let tuple = Expression::Tuple(Box::new(Tuple { - elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc), - lpar: vec![], - rpar: vec![], - })); - make_except_star(kw, star, tuple, None, col, b) - } - - rule finally_block() -> Finally<'input, 'a> + rule finally_block() -> Finally<'a> = kw:lit("finally") col:lit(":") b:block() { make_finally(kw, col, b) } @@ -597,49 +541,49 @@ parser! { // Match statement - rule match_stmt() -> Match<'input, 'a> + rule match_stmt() -> Match<'a> = kw:lit("match") subject:subject_expr() col:lit(":") tok(NL, "NEWLINE") i:tok(Indent, "INDENT") cases:case_block()+ d:tok(Dedent, "DEDENT") { make_match(kw, subject, col, i, cases, d) } - rule subject_expr() -> Expression<'input, 'a> + rule subject_expr() -> Expression<'a> = first:star_named_expression() c:comma() rest:star_named_expressions()? { - Expression::Tuple(Box::new( - make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default())) + Expression::Tuple( + make_tuple_from_elements(first.with_comma(c), rest.unwrap_or_default()) ) } / named_expression() - rule case_block() -> MatchCase<'input, 'a> + rule case_block() -> MatchCase<'a> = kw:lit("case") pattern:patterns() guard:guard()? col:lit(":") body:block() { make_case(kw, pattern, guard, col, body) } - rule guard() -> (TokenRef<'input, 'a>, Expression<'input, 'a>) + rule guard() -> (TokenRef<'a>, Expression<'a>) = kw:lit("if") exp:named_expression() { (kw, exp) } - rule patterns() -> MatchPattern<'input, 'a> + rule patterns() -> MatchPattern<'a> = pats:open_sequence_pattern() { MatchPattern::Sequence(make_list_pattern(None, pats, None)) } / pattern() - rule pattern() -> MatchPattern<'input, 'a> + rule pattern() -> MatchPattern<'a> = as_pattern() / or_pattern() - rule as_pattern() -> MatchPattern<'input, 'a> + rule as_pattern() -> MatchPattern<'a> = pat:or_pattern() kw:lit("as") target:pattern_capture_target() { make_as_pattern(Some(pat), Some(kw), Some(target)) } - rule or_pattern() -> MatchPattern<'input, 'a> + rule or_pattern() -> MatchPattern<'a> = pats:separated(, ) { make_or_pattern(pats.0, pats.1) } - rule closed_pattern() -> MatchPattern<'input, 'a> + rule closed_pattern() -> MatchPattern<'a> = literal_pattern() / capture_pattern() / wildcard_pattern() @@ -649,7 +593,7 @@ parser! 
{ / mapping_pattern() / class_pattern() - rule literal_pattern() -> MatchPattern<'input, 'a> + rule literal_pattern() -> MatchPattern<'a> = val:signed_number() !(lit("+") / lit("-")) { make_match_value(val) } / val:complex_number() { make_match_value(val) } / val:strings() { make_match_value(val.into()) } @@ -657,67 +601,67 @@ parser! { / n:lit("True") { make_match_singleton(make_name(n)) } / n:lit("False") { make_match_singleton(make_name(n)) } - rule literal_expr() -> Expression<'input, 'a> + rule literal_expr() -> Expression<'a> = val:signed_number() !(lit("+") / lit("-")) { val } / val:complex_number() { val } / val:strings() { val.into() } - / n:lit("None") { Expression::Name(Box::new(make_name(n))) } - / n:lit("True") { Expression::Name(Box::new(make_name(n))) } - / n:lit("False") { Expression::Name(Box::new(make_name(n))) } + / n:lit("None") { Expression::Name(make_name(n)) } + / n:lit("True") { Expression::Name(make_name(n)) } + / n:lit("False") { Expression::Name(make_name(n)) } - rule complex_number() -> Expression<'input, 'a> + rule complex_number() -> Expression<'a> = re:signed_real_number() op:(lit("+")/lit("-")) im:imaginary_number() {? make_binary_op(re, op, im).map_err(|_| "complex number") } - rule signed_number() -> Expression<'input, 'a> + rule signed_number() -> Expression<'a> = n:tok(Number, "number") { make_number(n) } / op:lit("-") n:tok(Number, "number") {? make_unary_op(op, make_number(n)).map_err(|_| "signed number") } - rule signed_real_number() -> Expression<'input, 'a> + rule signed_real_number() -> Expression<'a> = real_number() / op:lit("-") n:real_number() {? make_unary_op(op, n).map_err(|_| "signed real number") } - rule real_number() -> Expression<'input, 'a> + rule real_number() -> Expression<'a> = n:tok(Number, "number") {? ensure_real_number(n) } - rule imaginary_number() -> Expression<'input, 'a> + rule imaginary_number() -> Expression<'a> = n:tok(Number, "number") {? ensure_imaginary_number(n) } - rule capture_pattern() -> MatchPattern<'input, 'a> + rule capture_pattern() -> MatchPattern<'a> = t:pattern_capture_target() { make_as_pattern(None, None, Some(t)) } - rule pattern_capture_target() -> Name<'input, 'a> + rule pattern_capture_target() -> Name<'a> = !lit("_") n:name() !(lit(".") / lit("(") / lit("=")) { n } - rule wildcard_pattern() -> MatchPattern<'input, 'a> + rule wildcard_pattern() -> MatchPattern<'a> = lit("_") { make_as_pattern(None, None, None) } - rule value_pattern() -> MatchPattern<'input, 'a> + rule value_pattern() -> MatchPattern<'a> = v:attr() !(lit(".") / lit("(") / lit("=")) { make_match_value(v.into()) } // In upstream attr and name_or_attr are mutually recursive, but rust-peg // doesn't support this yet. - rule attr() -> NameOrAttribute<'input, 'a> + rule attr() -> NameOrAttribute<'a> = &(name() lit(".")) v:name_or_attr() { v } #[cache_left_rec] - rule name_or_attr() -> NameOrAttribute<'input, 'a> + rule name_or_attr() -> NameOrAttribute<'a> = val:name_or_attr() d:lit(".") attr:name() { - NameOrAttribute::A(Box::new(make_attribute(val.into(), d, attr))) + NameOrAttribute::A(make_attribute(val.into(), d, attr)) } - / n:name() { NameOrAttribute::N(Box::new(n)) } + / n:name() { NameOrAttribute::N(n) } - rule group_pattern() -> MatchPattern<'input, 'a> + rule group_pattern() -> MatchPattern<'a> = l:lpar() pat:pattern() r:rpar() { pat.with_parens(l, r) } - rule sequence_pattern() -> MatchPattern<'input, 'a> + rule sequence_pattern() -> MatchPattern<'a> = l:lbrak() pats:maybe_sequence_pattern()? 
r:rbrak() { MatchPattern::Sequence( make_list_pattern(Some(l), pats.unwrap_or_default(), Some(r)) @@ -727,17 +671,17 @@ parser! { MatchPattern::Sequence(make_tuple_pattern(l, pats.unwrap_or_default(), r)) } - rule open_sequence_pattern() -> Vec> + rule open_sequence_pattern() -> Vec> = pat:maybe_star_pattern() c:comma() pats:maybe_sequence_pattern()? { make_open_sequence_pattern(pat, c, pats.unwrap_or_default()) } - rule maybe_sequence_pattern() -> Vec> + rule maybe_sequence_pattern() -> Vec> = pats:separated_trailer(, ) { comma_separate(pats.0, pats.1, pats.2) } - rule maybe_star_pattern() -> StarrableMatchSequenceElement<'input, 'a> + rule maybe_star_pattern() -> StarrableMatchSequenceElement<'a> = s:star_pattern() { StarrableMatchSequenceElement::Starred(s) } / p:pattern() { StarrableMatchSequenceElement::Simple( @@ -745,11 +689,11 @@ parser! { ) } - rule star_pattern() -> MatchStar<'input, 'a> + rule star_pattern() -> MatchStar<'a> = star:lit("*") t:pattern_capture_target() {make_match_star(star, Some(t))} / star:lit("*") t:wildcard_pattern() { make_match_star(star, None) } - rule mapping_pattern() -> MatchPattern<'input, 'a> + rule mapping_pattern() -> MatchPattern<'a> = l:lbrace() r:rbrace() { make_match_mapping(l, vec![], None, None, None, None, r) } @@ -764,20 +708,20 @@ parser! { make_match_mapping(l, items, trail, None, None, None, r) } - rule items_pattern() -> Vec> + rule items_pattern() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule key_value_pattern() -> MatchMappingElement<'input, 'a> + rule key_value_pattern() -> MatchMappingElement<'a> = key:(literal_expr() / a:attr() {a.into()}) colon:lit(":") pat:pattern() { make_match_mapping_element(key, colon, pat) } - rule double_star_pattern() -> (TokenRef<'input, 'a>, Name<'input, 'a>) + rule double_star_pattern() -> (TokenRef<'a>, Name<'a>) = star:lit("**") n:pattern_capture_target() { (star, n) } - rule class_pattern() -> MatchPattern<'input, 'a> + rule class_pattern() -> MatchPattern<'a> = cls:name_or_attr() l:lit("(") r:lit(")") { make_class_pattern(cls, l, vec![], None, vec![], None, r) } @@ -792,115 +736,93 @@ parser! { make_class_pattern(cls, l, pats, Some(c), kwds, trail, r) } - rule positional_patterns() -> Vec> + rule positional_patterns() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule keyword_patterns() -> Vec> + rule keyword_patterns() -> Vec> = pats:separated(, ) { comma_separate(pats.0, pats.1, None) } - rule keyword_pattern() -> MatchKeywordElement<'input, 'a> + rule keyword_pattern() -> MatchKeywordElement<'a> = arg:name() eq:lit("=") value:pattern() { make_match_keyword_element(arg, eq, value) } - // Type statement - - rule type_stmt() -> TypeAlias<'input, 'a> - = t:lit("type") n:name() ps:type_params()? eq:lit("=") v:expression() { - make_type_alias(t, n, ps, eq, v) - } - - // Type parameter declaration - - rule type_params() -> TypeParameters<'input, 'a> - = lb:lbrak() ps:separated_trailer(, ) rb:rbrak() { - make_type_parameters(lb, comma_separate(ps.0, ps.1, ps.2), rb) - } - - rule type_param() -> TypeParam<'input, 'a> - = n:name() b:type_param_bound()? def:default()? { make_type_var(n, b, def) } - / s:lit("*") n:name() def:default_or_starred()? { make_type_var_tuple(s, n, def) } - / s:lit("**") n:name() def:default()? 
{ make_param_spec(s, n, def) } - - - rule type_param_bound() -> TypeParamBound<'input, 'a> - = c:lit(":") e:expression() { make_type_param_bound(c, e) } // Expressions #[cache] - rule expression() -> Expression<'input, 'a> + rule expression() -> Expression<'a> = _conditional_expression() / lambdef() - rule _conditional_expression() -> Expression<'input, 'a> + rule _conditional_expression() -> Expression<'a> = body:disjunction() i:lit("if") test:disjunction() e:lit("else") oe:expression() { - Expression::IfExp(Box::new(make_ifexp(body, i, test, e, oe))) + Expression::IfExp(make_ifexp(body, i, test, e, oe)) } / disjunction() - rule yield_expr() -> Expression<'input, 'a> + rule yield_expr() -> Expression<'a> = y:lit("yield") f:lit("from") a:expression() { - Expression::Yield(Box::new(make_yield(y, Some(f), Some(a)))) + Expression::Yield(make_yield(y, Some(f), Some(a))) } / y:lit("yield") a:star_expressions()? { - Expression::Yield(Box::new(make_yield(y, None, a))) + Expression::Yield(make_yield(y, None, a)) } - rule star_expressions() -> Expression<'input, 'a> + rule star_expressions() -> Expression<'a> = first:star_expression() rest:(comma:comma() e:star_expression() { (comma, expr_to_element(e)) })+ comma:comma()? { - Expression::Tuple(Box::new(make_tuple(expr_to_element(first), rest, comma, None, None))) + Expression::Tuple(make_tuple(expr_to_element(first), rest, comma, None, None)) } / e:star_expression() comma:comma() { - Expression::Tuple(Box::new(make_tuple(expr_to_element(e), vec![], Some(comma), None, None))) + Expression::Tuple(make_tuple(expr_to_element(e), vec![], Some(comma), None, None)) } / star_expression() #[cache] - rule star_expression() -> Expression<'input, 'a> + rule star_expression() -> Expression<'a> = star:lit("*") e:bitwise_or() { - Expression::StarredElement(Box::new(make_starred_element(star, expr_to_element(e)))) + Expression::StarredElement(make_starred_element(star, expr_to_element(e))) } / expression() - rule star_named_expressions() -> Vec> + rule star_named_expressions() -> Vec> = exps:separated_trailer(, ) { comma_separate(exps.0, exps.1, exps.2) } - rule star_named_expression() -> Element<'input, 'a> + rule star_named_expression() -> Element<'a> = star:lit("*") e:bitwise_or() { - Element::Starred(Box::new(make_starred_element(star, expr_to_element(e)))) + Element::Starred(make_starred_element(star, expr_to_element(e))) } / e:named_expression() { expr_to_element(e) } - rule named_expression() -> Expression<'input, 'a> + rule named_expression() -> Expression<'a> = a:name() op:lit(":=") b:expression() { - Expression::NamedExpr(Box::new(make_named_expr(a, op, b))) + Expression::NamedExpr(make_named_expr(a, op, b)) } / e:expression() !lit(":=") { e } #[cache] - rule disjunction() -> Expression<'input, 'a> + rule disjunction() -> Expression<'a> = a:conjunction() b:(or:lit("or") inner:conjunction() { (or, inner) })+ {? make_boolean_op(a, b).map_err(|e| "expected disjunction") } / conjunction() #[cache] - rule conjunction() -> Expression<'input, 'a> + rule conjunction() -> Expression<'a> = a:inversion() b:(and:lit("and") inner:inversion() { (and, inner) })+ {? make_boolean_op(a, b).map_err(|e| "expected conjunction") } / inversion() #[cache] - rule inversion() -> Expression<'input, 'a> + rule inversion() -> Expression<'a> = not:lit("not") a:inversion() {? make_unary_op(not, a).map_err(|e| "expected inversion") } @@ -909,14 +831,14 @@ parser! 
{ // Comparison operators #[cache] - rule comparison() -> Expression<'input, 'a> + rule comparison() -> Expression<'a> = a:bitwise_or() b:compare_op_bitwise_or_pair()+ { make_comparison(a, b) } / bitwise_or() // This implementation diverges slightly from CPython (3.9) to avoid bloating // the parser cache and increase readability. #[cache] - rule compare_op_bitwise_or_pair() -> (CompOp<'input, 'a>, Expression<'input, 'a>) + rule compare_op_bitwise_or_pair() -> (CompOp<'a>, Expression<'a>) = _op_bitwise_or("==") / _op_bitwise_or("!=") // TODO: support barry_as_flufl / _op_bitwise_or("<=") @@ -928,14 +850,14 @@ parser! { / _op_bitwise_or2("is", "not") / _op_bitwise_or("is") - rule _op_bitwise_or(o: &'static str) -> (CompOp<'input, 'a>, Expression<'input, 'a>) + rule _op_bitwise_or(o: &'static str) -> (CompOp<'a>, Expression<'a>) = op:lit(o) e:bitwise_or() {? make_comparison_operator(op) .map(|op| (op, e)) .map_err(|_| "comparison") } - rule _op_bitwise_or2(first: &'static str, second: &'static str) -> (CompOp<'input, 'a>, Expression<'input, 'a>) + rule _op_bitwise_or2(first: &'static str, second: &'static str) -> (CompOp<'a>, Expression<'a>) = f:lit(first) s:lit(second) e:bitwise_or() {? make_comparison_operator_2(f, s) .map(|op| (op, e)) @@ -943,28 +865,28 @@ parser! { } #[cache_left_rec] - rule bitwise_or() -> Expression<'input, 'a> + rule bitwise_or() -> Expression<'a> = a:bitwise_or() op:lit("|") b:bitwise_xor() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_or") } / bitwise_xor() #[cache_left_rec] - rule bitwise_xor() -> Expression<'input, 'a> + rule bitwise_xor() -> Expression<'a> = a:bitwise_xor() op:lit("^") b:bitwise_and() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_xor") } / bitwise_and() #[cache_left_rec] - rule bitwise_and() -> Expression<'input, 'a> + rule bitwise_and() -> Expression<'a> = a:bitwise_and() op:lit("&") b:shift_expr() {? make_binary_op(a, op, b).map_err(|e| "expected bitwise_and") } / shift_expr() #[cache_left_rec] - rule shift_expr() -> Expression<'input, 'a> + rule shift_expr() -> Expression<'a> = a:shift_expr() op:lit("<<") b:sum() {? make_binary_op(a, op, b).map_err(|e| "expected shift_expr") } @@ -974,7 +896,7 @@ parser! { / sum() #[cache_left_rec] - rule sum() -> Expression<'input, 'a> + rule sum() -> Expression<'a> = a:sum() op:lit("+") b:term() {? make_binary_op(a, op, b).map_err(|e| "expected sum") } @@ -984,7 +906,7 @@ parser! { / term() #[cache_left_rec] - rule term() -> Expression<'input, 'a> + rule term() -> Expression<'a> = a:term() op:lit("*") b:factor() {? make_binary_op(a, op, b).map_err(|e| "expected term") } @@ -1003,7 +925,7 @@ parser! { / factor() #[cache] - rule factor() -> Expression<'input, 'a> + rule factor() -> Expression<'a> = op:lit("+") a:factor() {? make_unary_op(op, a).map_err(|e| "expected factor") } @@ -1015,7 +937,7 @@ parser! { } / power() - rule power() -> Expression<'input, 'a> + rule power() -> Expression<'a> = a:await_primary() op:lit("**") b:factor() {? make_binary_op(a, op, b).map_err(|e| "expected power") } @@ -1023,74 +945,73 @@ parser! 
{ // Primary elements - rule await_primary() -> Expression<'input, 'a> + rule await_primary() -> Expression<'a> = aw:tok(AWAIT, "AWAIT") e:primary() { - Expression::Await(Box::new(make_await(aw, e))) + Expression::Await(make_await(aw, e)) } / primary() #[cache_left_rec] - rule primary() -> Expression<'input, 'a> + rule primary() -> Expression<'a> = v:primary() dot:lit(".") attr:name() { - Expression::Attribute(Box::new(make_attribute(v, dot, attr))) + Expression::Attribute(make_attribute(v, dot, attr)) } / a:primary() b:genexp() { - Expression::Call(Box::new(make_genexp_call(a, b))) + Expression::Call(make_genexp_call(a, b)) } / f:primary() lpar:lit("(") arg:arguments()? rpar:lit(")") { - Expression::Call(Box::new(make_call(f, lpar, arg.unwrap_or_default(), rpar))) + Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) } / v:primary() lbrak:lbrak() s:slices() rbrak:rbrak() { - Expression::Subscript(Box::new(make_subscript(v, lbrak, s, rbrak))) + Expression::Subscript(make_subscript(v, lbrak, s, rbrak)) } / atom() - rule slices() -> Vec> + rule slices() -> Vec> = s:slice() !lit(",") { vec![SubscriptElement { slice: s, comma: None }] } / slices:separated_trailer(, ) { make_slices(slices.0, slices.1, slices.2) } - rule slice() -> BaseSlice<'input, 'a> + rule slice() -> BaseSlice<'a> = l:expression()? col:lit(":") u:expression()? rest:(c:lit(":") s:expression()? {(c, s)})? { make_slice(l, col, u, rest) } - / e:starred_expression() { make_index_from_arg(e) } - / v:named_expression() { make_index(v) } + / v:expression() { make_index(v) } - rule atom() -> Expression<'input, 'a> - = n:name() { Expression::Name(Box::new(n)) } - / n:lit("True") { Expression::Name(Box::new(make_name(n))) } - / n:lit("False") { Expression::Name(Box::new(make_name(n))) } - / n:lit("None") { Expression::Name(Box::new(make_name(n))) } - / &(tok(STRING, "") / tok(FStringStart, "") / tok(TStringStart, "")) s:strings() {s.into()} + rule atom() -> Expression<'a> + = n:name() { Expression::Name(n) } + / n:lit("True") { Expression::Name(make_name(n)) } + / n:lit("False") { Expression::Name(make_name(n)) } + / n:lit("None") { Expression::Name(make_name(n)) } + / &(tok(STRING, "") / tok(FStringStart, "")) s:strings() {s.into()} / n:tok(Number, "NUMBER") { make_number(n) } - / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(Box::new(g))})) {e} + / &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(g)})) {e} / &lit("[") e:(list() / listcomp()) {e} / &lit("{") e:(dict() / set() / dictcomp() / setcomp()) {e} - / lit("...") { Expression::Ellipsis(Box::new(Ellipsis {lpar: vec![], rpar: vec![]}))} + / lit("...") { Expression::Ellipsis(Ellipsis {lpar: vec![], rpar: vec![]})} - rule group() -> Expression<'input, 'a> + rule group() -> Expression<'a> = lpar:lpar() e:(yield_expr() / named_expression()) rpar:rpar() { e.with_parens(lpar, rpar) } // Lambda functions - rule lambdef() -> Expression<'input, 'a> + rule lambdef() -> Expression<'a> = kw:lit("lambda") p:lambda_params()? c:lit(":") b:expression() { - Expression::Lambda(Box::new(make_lambda(kw, p.unwrap_or_default(), c, b))) + Expression::Lambda(make_lambda(kw, p.unwrap_or_default(), c, b)) } - rule lambda_params() -> Parameters<'input, 'a> + rule lambda_params() -> Parameters<'a> = lambda_parameters() // lambda_parameters etc. duplicates parameters but without annotations or type // comments, and if there's no comma after a parameter, we expect a colon, not a // close parenthesis. 
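// A rough, self-contained illustration of the point in the comment above, using a
// hand-rolled splitter rather than the real peg rules: the same comma-separated
// parameter logic is spelled out twice because a `def` parameter list is closed by
// `)` while a `lambda` parameter list is closed by `:` (so the "no trailing comma"
// case has to look ahead for a different token in each variant).
fn split_params(src: &str, terminator: char) -> Option<(Vec<&str>, &str)> {
    // Everything before the terminator is the parameter list; the rest follows it.
    let (params, rest) = src.split_once(terminator)?;
    let names = params
        .split(',')
        .map(str::trim)
        .filter(|p| !p.is_empty())
        .collect();
    Some((names, rest.trim_start()))
}

fn main() {
    // `def`-style: the list runs up to the closing parenthesis.
    assert_eq!(
        split_params("a, b, c) -> int:", ')'),
        Some((vec!["a", "b", "c"], "-> int:"))
    );
    // `lambda`-style: the list runs up to the colon.
    assert_eq!(
        split_params("a, b, c: a + b + c", ':'),
        Some((vec!["a", "b", "c"], "a + b + c"))
    );
}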
- rule lambda_parameters() -> Parameters<'input, 'a> + rule lambda_parameters() -> Parameters<'a> = a:lambda_slash_no_default() b:lambda_param_no_default()* c:lambda_param_with_default()* d:lambda_star_etc()? { make_parameters(Some(a), concat(b, c), d) @@ -1110,23 +1031,23 @@ parser! { make_parameters(None, vec![], Some(d)) } - rule lambda_slash_no_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:lambda_param_no_default()+ tok:lit("/") com:comma() { - (a, ParamSlash { comma: Some(com), tok } ) + rule lambda_slash_no_default() -> (Vec>, ParamSlash<'a>) + = a:lambda_param_no_default()+ slash:lit("/") com:comma() { + (a, ParamSlash { comma: Some(com) } ) } - / a:lambda_param_no_default()+ tok:lit("/") &lit(":") { - (a, ParamSlash { comma: None, tok }) + / a:lambda_param_no_default()+ slash:lit("/") &lit(":") { + (a, ParamSlash { comma: None }) } - rule lambda_slash_with_default() -> (Vec>, ParamSlash<'input, 'a>) - = a:lambda_param_no_default()* b:lambda_param_with_default()+ tok:lit("/") c:comma(){ - (concat(a, b), ParamSlash { comma: Some(c), tok }) + rule lambda_slash_with_default() -> (Vec>, ParamSlash<'a>) + = a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") c:comma(){ + (concat(a, b), ParamSlash { comma: Some(c) }) } - / a:lambda_param_no_default()* b:lambda_param_with_default()+ tok:lit("/") &lit(":") { - (concat(a, b), ParamSlash { comma: None, tok }) + / a:lambda_param_no_default()* b:lambda_param_with_default()+ slash:lit("/") &lit(":") { + (concat(a, b), ParamSlash { comma: None }) } - rule lambda_star_etc() -> StarEtc<'input, 'a> + rule lambda_star_etc() -> StarEtc<'a> = star:lit("*") a:lambda_param_no_default() b:lambda_param_maybe_default()* kw:lambda_kwds()? { StarEtc(Some(StarArg::Param( @@ -1134,22 +1055,22 @@ parser! { )), b, kw) } / lit("*") c:comma() b:lambda_param_maybe_default()+ kw:lambda_kwds()? { - StarEtc(Some(StarArg::Star(Box::new(ParamStar {comma: c}))), b, kw) + StarEtc(Some(StarArg::Star(ParamStar {comma: c})), b, kw) } / kw:lambda_kwds() { StarEtc(None, vec![], Some(kw)) } - rule lambda_kwds() -> Param<'input, 'a> + rule lambda_kwds() -> Param<'a> = star:lit("**") a:lambda_param_no_default() { add_param_star(a, star) } - rule lambda_param_no_default() -> Param<'input, 'a> + rule lambda_param_no_default() -> Param<'a> = a:lambda_param() c:lit(",") { add_param_default(a, None, Some(c)) } / a:lambda_param() &lit(":") {a} - rule lambda_param_with_default() -> Param<'input, 'a> + rule lambda_param_with_default() -> Param<'a> = a:lambda_param() def:default() c:lit(",") { add_param_default(a, Some(def), Some(c)) } @@ -1157,7 +1078,7 @@ parser! { add_param_default(a, Some(def), None) } - rule lambda_param_maybe_default() -> Param<'input, 'a> + rule lambda_param_maybe_default() -> Param<'a> = a:lambda_param() def:default()? c:lit(",") { add_param_default(a, def, Some(c)) } @@ -1165,70 +1086,70 @@ parser! { add_param_default(a, def, None) } - rule lambda_param() -> Param<'input, 'a> + rule lambda_param() -> Param<'a> = name:name() { Param { name, ..Default::default() } } // Literals - rule strings() -> String<'input, 'a> + rule strings() -> String<'a> = s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)} - / str:fstring() t:&_ {(String::Formatted(str), t)} / str:tstring() t:&_ {(String::Templated(str), t)})+ {? + / str:fstring() t:&_ {(String::Formatted(str), t)})+ { make_strings(s) } - rule list() -> Expression<'input, 'a> + rule list() -> Expression<'a> = lbrak:lbrak() e:star_named_expressions()? 
rbrak:rbrak() { - Expression::List(Box::new( - make_list(lbrak, e.unwrap_or_default(), rbrak)) + Expression::List( + make_list(lbrak, e.unwrap_or_default(), rbrak) ) } - rule tuple() -> Expression<'input, 'a> + rule tuple() -> Expression<'a> = lpar:lpar() first:star_named_expression() &lit(",") rest:(c:comma() e:star_named_expression() {(c, e)})* trailing_comma:comma()? rpar:rpar() { - Expression::Tuple(Box::new( + Expression::Tuple( make_tuple(first, rest, trailing_comma, Some(lpar), Some(rpar)) - )) + ) } / lpar:lpar() rpar:lit(")") { - Expression::Tuple(Box::new(Tuple::default().with_parens( - lpar, RightParen { rpar_tok: rpar } - )))} + Expression::Tuple(Tuple::default().with_parens( + lpar, RightParen { whitespace_before: Default::default(), rpar_tok: rpar } + ))} - rule set() -> Expression<'input, 'a> + rule set() -> Expression<'a> = lbrace:lbrace() e:star_named_expressions()? rbrace:rbrace() { - Expression::Set(Box::new(make_set(lbrace, e.unwrap_or_default(), rbrace))) + Expression::Set(make_set(lbrace, e.unwrap_or_default(), rbrace)) } // Dicts - rule dict() -> Expression<'input, 'a> + rule dict() -> Expression<'a> = lbrace:lbrace() els:double_starred_keypairs()? rbrace:rbrace() { - Expression::Dict(Box::new(make_dict(lbrace, els.unwrap_or_default(), rbrace))) + Expression::Dict(make_dict(lbrace, els.unwrap_or_default(), rbrace)) } - rule double_starred_keypairs() -> Vec> + rule double_starred_keypairs() -> Vec> = pairs:separated_trailer(, ) { make_double_starred_keypairs(pairs.0, pairs.1, pairs.2) } - rule double_starred_kvpair() -> DictElement<'input, 'a> + rule double_starred_kvpair() -> DictElement<'a> = s:lit("**") e:bitwise_or() { DictElement::Starred(make_double_starred_element(s, e)) } / k:kvpair() { make_dict_element(k) } - rule kvpair() -> (Expression<'input, 'a>, TokenRef<'input, 'a>, Expression<'input, 'a>) + rule kvpair() -> (Expression<'a>, TokenRef<'a>, Expression<'a>) = k:expression() colon:lit(":") v:expression() { (k, colon, v) } // Comprehensions & generators - rule for_if_clauses() -> CompFor<'input, 'a> - = c:for_if_clause()+ {? merge_comp_fors(c) } + rule for_if_clauses() -> CompFor<'a> + = c:for_if_clause()+ { merge_comp_fors(c) } - rule for_if_clause() -> CompFor<'input, 'a> + rule for_if_clause() -> CompFor<'a> = asy:_async() f:lit("for") tgt:star_targets() i:lit("in") iter:disjunction() ifs:_comp_if()* { make_for_if(Some(asy), f, tgt, i, iter, ifs) @@ -1238,42 +1159,42 @@ parser! 
{ make_for_if(None, f, tgt, i, iter, ifs) } - rule _comp_if() -> CompIf<'input, 'a> + rule _comp_if() -> CompIf<'a> = kw:lit("if") cond:disjunction() { make_comp_if(kw, cond) } - rule listcomp() -> Expression<'input, 'a> + rule listcomp() -> Expression<'a> = lbrak:lbrak() elt:named_expression() comp:for_if_clauses() rbrak:rbrak() { - Expression::ListComp(Box::new(make_list_comp(lbrak, elt, comp, rbrak))) + Expression::ListComp(make_list_comp(lbrak, elt, comp, rbrak)) } - rule setcomp() -> Expression<'input, 'a> + rule setcomp() -> Expression<'a> = l:lbrace() elt:named_expression() comp:for_if_clauses() r:rbrace() { - Expression::SetComp(Box::new(make_set_comp(l, elt, comp, r))) + Expression::SetComp(make_set_comp(l, elt, comp, r)) } - rule genexp() -> GeneratorExp<'input, 'a> + rule genexp() -> GeneratorExp<'a> = lpar:lpar() g:_bare_genexp() rpar:rpar() { g.with_parens(lpar, rpar) } - rule _bare_genexp() -> GeneratorExp<'input, 'a> + rule _bare_genexp() -> GeneratorExp<'a> = elt:named_expression() comp:for_if_clauses() { make_bare_genexp(elt, comp) } - rule dictcomp() -> Expression<'input, 'a> + rule dictcomp() -> Expression<'a> = lbrace:lbrace() elt:kvpair() comp:for_if_clauses() rbrace:rbrace() { - Expression::DictComp(Box::new(make_dict_comp(lbrace, elt, comp, rbrace))) + Expression::DictComp(make_dict_comp(lbrace, elt, comp, rbrace)) } // Function call arguments - rule arguments() -> Vec> + rule arguments() -> Vec> = a:args() trail:comma()? &lit(")") {add_arguments_trailing_comma(a, trail)} - rule args() -> Vec> + rule args() -> Vec> = first:_posarg() rest:(c:comma() a:_posarg() {(c, a)})* kw:(c:comma() k:kwargs() {(c, k)})? { @@ -1285,11 +1206,11 @@ parser! { } / kwargs() - rule _posarg() -> Arg<'input, 'a> + rule _posarg() -> Arg<'a> = a:(starred_expression() / e:named_expression() { make_arg(e) }) !lit("=") { a } - rule kwargs() -> Vec> + rule kwargs() -> Vec> = sitems:separated(, ) scomma:comma() ditems:separated(, ) { @@ -1305,18 +1226,18 @@ parser! { comma_separate(items.0, items.1, None) } - rule starred_expression() -> Arg<'input, 'a> + rule starred_expression() -> Arg<'a> = star:lit("*") e:expression() { make_star_arg(star, e) } - rule kwarg_or_starred() -> Arg<'input, 'a> + rule kwarg_or_starred() -> Arg<'a> = _kwarg() / starred_expression() - rule kwarg_or_double_starred() -> Arg<'input, 'a> + rule kwarg_or_double_starred() -> Arg<'a> = _kwarg() / star:lit("**") e:expression() { make_star_arg(star, e) } - rule _kwarg() -> Arg<'input, 'a> + rule _kwarg() -> Arg<'a> = n:name() eq:lit("=") v:expression() { make_kwarg(n, eq, v) } @@ -1324,22 +1245,22 @@ parser! { // Assignment targets // Generic targets - rule star_targets() -> AssignTargetExpression<'input, 'a> + rule star_targets() -> AssignTargetExpression<'a> = a:star_target() !lit(",") {a} / targets:separated_trailer(, ) { - AssignTargetExpression::Tuple(Box::new( + AssignTargetExpression::Tuple( make_tuple(targets.0, targets.1, targets.2, None, None) - )) + ) } - rule star_targets_list_seq() -> Vec> + rule star_targets_list_seq() -> Vec> = targets:separated_trailer(, ) { comma_separate(targets.0, targets.1, targets.2) } // This differs from star_targets below because it requires at least two items // in the tuple - rule star_targets_tuple_seq() -> Tuple<'input, 'a> + rule star_targets_tuple_seq() -> Tuple<'a> = first:(t:star_target() {assign_target_to_element(t)}) rest:(c:comma() t:star_target() {(c, assign_target_to_element(t))})+ trail:comma()? { @@ -1350,69 +1271,69 @@ parser! 
{ } #[cache] - rule star_target() -> AssignTargetExpression<'input, 'a> + rule star_target() -> AssignTargetExpression<'a> = star:lit("*") !lit("*") t:star_target() { - AssignTargetExpression::StarredElement(Box::new( + AssignTargetExpression::StarredElement( make_starred_element(star, assign_target_to_element(t)) - )) + ) } / target_with_star_atom() #[cache] - rule target_with_star_atom() -> AssignTargetExpression<'input, 'a> + rule target_with_star_atom() -> AssignTargetExpression<'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { - AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) + AssignTargetExpression::Attribute(make_attribute(a, dot, n)) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - AssignTargetExpression::Subscript(Box::new( + AssignTargetExpression::Subscript( make_subscript(a, lbrak, s, rbrak) - )) + ) } / a:star_atom() {a} - rule star_atom() -> AssignTargetExpression<'input, 'a> - = a:name() { AssignTargetExpression::Name(Box::new(a)) } + rule star_atom() -> AssignTargetExpression<'a> + = a:name() { AssignTargetExpression::Name(a) } / lpar:lpar() a:target_with_star_atom() rpar:rpar() { a.with_parens(lpar, rpar) } / lpar:lpar() a:star_targets_tuple_seq()? rpar:rpar() { - AssignTargetExpression::Tuple(Box::new( + AssignTargetExpression::Tuple( a.unwrap_or_default().with_parens(lpar, rpar) - )) + ) } / lbrak:lbrak() a:star_targets_list_seq()? rbrak:rbrak() { - AssignTargetExpression::List(Box::new( + AssignTargetExpression::List( make_list(lbrak, a.unwrap_or_default(), rbrak) - )) + ) } - rule single_target() -> AssignTargetExpression<'input, 'a> + rule single_target() -> AssignTargetExpression<'a> = single_subscript_attribute_target() - / n:name() { AssignTargetExpression::Name(Box::new(n)) } + / n:name() { AssignTargetExpression::Name(n) } / lpar:lpar() t:single_target() rpar:rpar() { t.with_parens(lpar, rpar) } - rule single_subscript_attribute_target() -> AssignTargetExpression<'input, 'a> + rule single_subscript_attribute_target() -> AssignTargetExpression<'a> = a:t_primary() dot:lit(".") n:name() !t_lookahead() { - AssignTargetExpression::Attribute(Box::new(make_attribute(a, dot, n))) + AssignTargetExpression::Attribute(make_attribute(a, dot, n)) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - AssignTargetExpression::Subscript(Box::new( + AssignTargetExpression::Subscript( make_subscript(a, lbrak, s, rbrak) - )) + ) } #[cache_left_rec] - rule t_primary() -> Expression<'input, 'a> + rule t_primary() -> Expression<'a> = value:t_primary() dot:lit(".") attr:name() &t_lookahead() { - Expression::Attribute(Box::new(make_attribute(value, dot, attr))) + Expression::Attribute(make_attribute(value, dot, attr)) } / v:t_primary() l:lbrak() s:slices() r:rbrak() &t_lookahead() { - Expression::Subscript(Box::new(make_subscript(v, l, s, r))) + Expression::Subscript(make_subscript(v, l, s, r)) } / f:t_primary() gen:genexp() &t_lookahead() { - Expression::Call(Box::new(make_genexp_call(f, gen))) + Expression::Call(make_genexp_call(f, gen)) } / f:t_primary() lpar:lit("(") arg:arguments()? rpar:lit(")") &t_lookahead() { - Expression::Call(Box::new(make_call(f, lpar, arg.unwrap_or_default(), rpar))) + Expression::Call(make_call(f, lpar, arg.unwrap_or_default(), rpar)) } / a:atom() &t_lookahead() {a} @@ -1421,137 +1342,109 @@ parser! 
{ // Targets for del statements - rule del_targets() -> Vec> + rule del_targets() -> Vec> = t:separated_trailer(, ) { comma_separate(t.0, t.1, t.2) } - rule del_target() -> DelTargetExpression<'input, 'a> + rule del_target() -> DelTargetExpression<'a> = a:t_primary() d:lit(".") n:name() !t_lookahead() { - DelTargetExpression::Attribute(Box::new(make_attribute(a, d, n))) + DelTargetExpression::Attribute(make_attribute(a, d, n)) } / a:t_primary() lbrak:lbrak() s:slices() rbrak:rbrak() !t_lookahead() { - DelTargetExpression::Subscript(Box::new( + DelTargetExpression::Subscript( make_subscript(a, lbrak, s, rbrak) - )) + ) } / del_t_atom() - rule del_t_atom() -> DelTargetExpression<'input, 'a> - = n:name() { DelTargetExpression::Name(Box::new(n)) } + rule del_t_atom() -> DelTargetExpression<'a> + = n:name() { DelTargetExpression::Name(n) } / l:lpar() d:del_target() r:rpar() { d.with_parens(l, r) } / l:lpar() d:del_targets()? r:rpar() { make_del_tuple(Some(l), d.unwrap_or_default(), Some(r)) } / l:lbrak() d:del_targets()? r:rbrak() { - DelTargetExpression::List(Box::new( + DelTargetExpression::List( make_list(l, d.unwrap_or_default(), r) - )) + ) } // F-strings - rule fstring() -> FormattedString<'input, 'a> + rule fstring() -> FormattedString<'a> = start:tok(FStringStart, "f\"") parts:(_f_string() / _f_replacement())* end:tok(FStringEnd, "\"") { make_fstring(start.string, parts, end.string) } - rule _f_string() -> FormattedStringContent<'input, 'a> + rule _f_string() -> FormattedStringContent<'a> = t:tok(FStringString, "f-string contents") { - FormattedStringContent::Text(make_fstringtext(t.string)) + FormattedStringContent::Text(FormattedStringText { value: t.string }) } - rule _f_replacement() -> FormattedStringContent<'input, 'a> + rule _f_replacement() -> FormattedStringContent<'a> = lb:lit("{") e:_f_expr() eq:lit("=")? conv:(t:lit("!") c:_f_conversion() {(t,c)})? spec:(t:lit(":") s:_f_spec() {(t,s)})? rb:lit("}") { - FormattedStringContent::Expression(Box::new( + FormattedStringContent::Expression( make_fstring_expression(lb, e, eq, conv, spec, rb) - )) + ) } - rule _f_expr() -> Expression<'input, 'a> - = (g:_bare_genexp() {Expression::GeneratorExp(Box::new(g))}) - / star_expressions() + rule _f_expr() -> Expression<'a> + = (g:_bare_genexp() {Expression::GeneratorExp(g)}) + / _conditional_expression() / yield_expr() rule _f_conversion() -> &'a str = lit("r") {"r"} / lit("s") {"s"} / lit("a") {"a"} - rule _f_spec() -> Vec> + rule _f_spec() -> Vec> = (_f_string() / _f_replacement())* - // T-strings - - rule tstring() -> TemplatedString<'input, 'a> - = start:tok(TStringStart, "t\"") - parts:(_t_string() / _t_replacement())* - end:tok(TStringEnd, "\"") { - make_tstring(start.string, parts, end.string) - } - - rule _t_string() -> TemplatedStringContent<'input, 'a> - = t:tok(TStringString, "t-string contents") { - TemplatedStringContent::Text(make_tstringtext(t.string)) - } - - - rule _t_replacement() -> TemplatedStringContent<'input, 'a> - = lb:lit("{") e:annotated_rhs() eq:lit("=")? - conv:(t:lit("!") c:_f_conversion() {(t,c)})? - spec:(t:lit(":") s:_t_spec() {(t,s)})? 
- rb:lit("}") { - TemplatedStringContent::Expression(Box::new( - make_tstring_expression(lb, e, eq, conv, spec, rb) - )) - } - - rule _t_spec() -> Vec> - = (_t_string() / _t_replacement())* - // CST helpers - rule comma() -> Comma<'input, 'a> + rule comma() -> Comma<'a> = c:lit(",") { make_comma(c) } - rule dots() -> Vec> + rule dots() -> Vec> = ds:((dot:lit(".") { make_dot(dot) })+ / tok:lit("...") { - vec![make_dot(tok), make_dot(tok), make_dot(tok)]} + vec![make_dot(tok.clone()), make_dot(tok.clone()), make_dot(tok.clone())]} )+ { ds.into_iter().flatten().collect() } - rule lpar() -> LeftParen<'input, 'a> + rule lpar() -> LeftParen<'a> = a:lit("(") { make_lpar(a) } - rule rpar() -> RightParen<'input, 'a> + rule rpar() -> RightParen<'a> = a:lit(")") { make_rpar(a) } - rule lbrak() -> LeftSquareBracket<'input, 'a> + rule lbrak() -> LeftSquareBracket<'a> = tok:lit("[") { make_left_bracket(tok) } - rule rbrak() -> RightSquareBracket<'input, 'a> + rule rbrak() -> RightSquareBracket<'a> = tok:lit("]") { make_right_bracket(tok) } - rule lbrace() -> LeftCurlyBrace<'input, 'a> + rule lbrace() -> LeftCurlyBrace<'a> = tok:lit("{") { make_left_brace(tok) } - rule rbrace() -> RightCurlyBrace<'input, 'a> + rule rbrace() -> RightCurlyBrace<'a> = tok:lit("}") { make_right_brace(tok) } /// matches any token, not just whitespace - rule _() -> TokenRef<'input, 'a> + rule _() -> TokenRef<'a> = [t] { t } - rule lit(lit: &'static str) -> TokenRef<'input, 'a> + rule lit(lit: &'static str) -> TokenRef<'a> = [t] {? if t.string == lit { Ok(t) } else { Err(lit) } } - rule tok(tok: TokType, err: &'static str) -> TokenRef<'input, 'a> + rule tok(tok: TokType, err: &'static str) -> TokenRef<'a> = [t] {? if t.r#type == tok { Ok(t) } else { Err(err) } } - rule name() -> Name<'input, 'a> + rule name() -> Name<'a> = !( lit("False") / lit("None") / lit("True") / lit("and") / lit("as") / lit("assert") / lit("async") / lit("await") / lit("break") / lit("class") / lit("continue") / lit("def") / lit("del") / lit("elif") / lit("else") / lit("except") / lit("finally") / lit("for") / lit("from") / lit("global") / lit("if") / lit("import") @@ -1560,7 +1453,7 @@ parser! { ) t:tok(NameTok, "NAME") {make_name(t)} - rule _async() -> TokenRef<'input, 'a> + rule _async() -> TokenRef<'a> = tok(Async, "ASYNC") rule separated_trailer(el: rule, sep: rule) -> (El, Vec<(Sep, El)>, Option) @@ -1569,46 +1462,52 @@ parser! { rule separated(el: rule, sep: rule) -> (El, Vec<(Sep, El)>) = e:el() rest:(s:sep() e:el() {(s, e)})* {(e, rest)} - rule traced(e: rule) -> T = - &(_* { + rule traced(e: rule) -> T = + &(_* { #[cfg(feature = "trace")] { println!("[PEG_INPUT_START]"); println!("{}", input); println!("[PEG_TRACE_START]"); } - }) - e:e()? {? + }) + e:e()? {? 
#[cfg(feature = "trace")] - println!("[PEG_TRACE_STOP]"); - e.ok_or("") - } + println!("[PEG_TRACE_STOP]"); + e.ok_or("") + } - } + } } #[allow(clippy::too_many_arguments)] -fn make_function_def<'input, 'a>( - async_tok: Option>, - def_tok: TokenRef<'input, 'a>, - name: Name<'input, 'a>, - type_parameters: Option>, - open_paren_tok: TokenRef<'input, 'a>, - params: Option>, - close_paren_tok: TokenRef<'input, 'a>, - returns: Option>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> FunctionDef<'input, 'a> { - let asynchronous = async_tok.as_ref().map(|_| make_async()); +fn make_function_def<'a>( + async_tok: Option>, + def_tok: TokenRef<'a>, + name: Name<'a>, + open_paren_tok: TokenRef<'a>, + params: Option>, + close_paren_tok: TokenRef<'a>, + returns: Option>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> FunctionDef<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); FunctionDef { name, - type_parameters, params: params.unwrap_or_default(), body, decorators: Default::default(), returns, asynchronous, + leading_lines: Default::default(), + lines_after_decorators: vec![], + whitespace_after_def: Default::default(), + whitespace_after_name: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_before_params: Default::default(), async_tok, def_tok, open_paren_tok, @@ -1617,22 +1516,25 @@ fn make_function_def<'input, 'a>( } } -fn make_decorator<'input, 'a>( - at_tok: TokenRef<'input, 'a>, - name: Expression<'input, 'a>, - newline_tok: TokenRef<'input, 'a>, -) -> Decorator<'input, 'a> { +fn make_decorator<'a>( + at_tok: TokenRef<'a>, + name: Expression<'a>, + newline_tok: TokenRef<'a>, +) -> Decorator<'a> { Decorator { decorator: name, + leading_lines: Default::default(), + whitespace_after_at: Default::default(), + trailing_whitespace: Default::default(), newline_tok, at_tok, } } -fn make_comparison<'input, 'a>( - head: Expression<'input, 'a>, - tail: Vec<(CompOp<'input, 'a>, Expression<'input, 'a>)>, -) -> Expression<'input, 'a> { +fn make_comparison<'a>( + head: Expression<'a>, + tail: Vec<(CompOp<'a>, Expression<'a>)>, +) -> Expression<'a> { let mut comparisons = vec![]; for (operator, e) in tail { comparisons.push(ComparisonTarget { @@ -1640,40 +1542,82 @@ fn make_comparison<'input, 'a>( comparator: e, }); } - Expression::Comparison(Box::new(Comparison { + Expression::Comparison(Comparison { left: Box::new(head), comparisons, lpar: vec![], rpar: vec![], - })) + }) } -fn make_comparison_operator<'input, 'a>( - tok: TokenRef<'input, 'a>, -) -> Result<'a, CompOp<'input, 'a>> { +fn make_comparison_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); match tok.string { - "<" => Ok(CompOp::LessThan { tok }), - ">" => Ok(CompOp::GreaterThan { tok }), - "<=" => Ok(CompOp::LessThanEqual { tok }), - ">=" => Ok(CompOp::GreaterThanEqual { tok }), - "==" => Ok(CompOp::Equal { tok }), - "!=" => Ok(CompOp::NotEqual { tok }), - "in" => Ok(CompOp::In { tok }), - "is" => Ok(CompOp::Is { tok }), + "<" => Ok(CompOp::LessThan { + whitespace_after, + whitespace_before, + tok, + }), + ">" => Ok(CompOp::GreaterThan { + whitespace_after, + whitespace_before, + tok, + }), + "<=" => Ok(CompOp::LessThanEqual { + whitespace_after, + whitespace_before, + tok, + }), + ">=" => Ok(CompOp::GreaterThanEqual { + whitespace_after, + whitespace_before, + tok, + }), + "==" => Ok(CompOp::Equal { + whitespace_after, + whitespace_before, + tok, + 
}), + "!=" => Ok(CompOp::NotEqual { + whitespace_after, + whitespace_before, + tok, + }), + "in" => Ok(CompOp::In { + whitespace_after, + whitespace_before, + tok, + }), + "is" => Ok(CompOp::Is { + whitespace_after, + whitespace_before, + tok, + }), _ => Err(ParserError::OperatorError), } } -fn make_comparison_operator_2<'input, 'a>( - first: TokenRef<'input, 'a>, - second: TokenRef<'input, 'a>, -) -> Result<'a, CompOp<'input, 'a>> { +fn make_comparison_operator_2<'a>( + first: TokenRef<'a>, + second: TokenRef<'a>, +) -> Result<'a, CompOp<'a>> { + let whitespace_before = Default::default(); + let whitespace_between = Default::default(); + let whitespace_after = Default::default(); + match (first.string, second.string) { ("is", "not") => Ok(CompOp::IsNot { + whitespace_before, + whitespace_between, + whitespace_after, is_tok: first, not_tok: second, }), ("not", "in") => Ok(CompOp::NotIn { + whitespace_before, + whitespace_between, + whitespace_after, not_tok: first, in_tok: second, }), @@ -1681,132 +1625,207 @@ fn make_comparison_operator_2<'input, 'a>( } } -fn make_boolean_op<'input, 'a>( - head: Expression<'input, 'a>, - tail: Vec<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, -) -> Result<'a, Expression<'input, 'a>> { +fn make_boolean_op<'a>( + head: Expression<'a>, + tail: Vec<(TokenRef<'a>, Expression<'a>)>, +) -> Result<'a, Expression<'a>> { if tail.is_empty() { return Ok(head); } let mut expr = head; for (tok, right) in tail { - expr = Expression::BooleanOperation(Box::new(BooleanOperation { + expr = Expression::BooleanOperation(BooleanOperation { left: Box::new(expr), operator: make_boolean_operator(tok)?, right: Box::new(right), lpar: vec![], rpar: vec![], - })) + }) } Ok(expr) } -fn make_boolean_operator<'input, 'a>( - tok: TokenRef<'input, 'a>, -) -> Result<'a, BooleanOp<'input, 'a>> { +fn make_boolean_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); match tok.string { - "and" => Ok(BooleanOp::And { tok }), - "or" => Ok(BooleanOp::Or { tok }), + "and" => Ok(BooleanOp::And { + whitespace_after, + whitespace_before, + tok, + }), + "or" => Ok(BooleanOp::Or { + whitespace_after, + whitespace_before, + tok, + }), _ => Err(ParserError::OperatorError), } } -fn make_binary_op<'input, 'a>( - left: Expression<'input, 'a>, - op: TokenRef<'input, 'a>, - right: Expression<'input, 'a>, -) -> Result<'a, Expression<'input, 'a>> { +fn make_binary_op<'a>( + left: Expression<'a>, + op: TokenRef<'a>, + right: Expression<'a>, +) -> Result<'a, Expression<'a>> { let operator = make_binary_operator(op)?; - Ok(Expression::BinaryOperation(Box::new(BinaryOperation { + Ok(Expression::BinaryOperation(BinaryOperation { left: Box::new(left), operator, right: Box::new(right), lpar: vec![], rpar: vec![], - }))) + })) } -fn make_binary_operator<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, BinaryOp<'input, 'a>> { +fn make_binary_operator(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + match tok.string { - "+" => Ok(BinaryOp::Add { tok }), - "-" => Ok(BinaryOp::Subtract { tok }), - "*" => Ok(BinaryOp::Multiply { tok }), - "/" => Ok(BinaryOp::Divide { tok }), - "//" => Ok(BinaryOp::FloorDivide { tok }), - "%" => Ok(BinaryOp::Modulo { tok }), - "**" => Ok(BinaryOp::Power { tok }), - "<<" => Ok(BinaryOp::LeftShift { tok }), - ">>" => Ok(BinaryOp::RightShift { tok }), - "|" => Ok(BinaryOp::BitOr { tok }), - "&" => Ok(BinaryOp::BitAnd { tok }), - "^" => 
Ok(BinaryOp::BitXor { tok }), - "@" => Ok(BinaryOp::MatrixMultiply { tok }), + "+" => Ok(BinaryOp::Add { + whitespace_after, + whitespace_before, + tok, + }), + "-" => Ok(BinaryOp::Subtract { + whitespace_after, + whitespace_before, + tok, + }), + "*" => Ok(BinaryOp::Multiply { + whitespace_after, + whitespace_before, + tok, + }), + "/" => Ok(BinaryOp::Divide { + whitespace_after, + whitespace_before, + tok, + }), + "//" => Ok(BinaryOp::FloorDivide { + whitespace_after, + whitespace_before, + tok, + }), + "%" => Ok(BinaryOp::Modulo { + whitespace_after, + whitespace_before, + tok, + }), + "**" => Ok(BinaryOp::Power { + whitespace_after, + whitespace_before, + tok, + }), + "<<" => Ok(BinaryOp::LeftShift { + whitespace_after, + whitespace_before, + tok, + }), + ">>" => Ok(BinaryOp::RightShift { + whitespace_after, + whitespace_before, + tok, + }), + "|" => Ok(BinaryOp::BitOr { + whitespace_after, + whitespace_before, + tok, + }), + "&" => Ok(BinaryOp::BitAnd { + whitespace_after, + whitespace_before, + tok, + }), + "^" => Ok(BinaryOp::BitXor { + whitespace_after, + whitespace_before, + tok, + }), + "@" => Ok(BinaryOp::MatrixMultiply { + whitespace_after, + whitespace_before, + tok, + }), _ => Err(ParserError::OperatorError), } } -fn make_unary_op<'input, 'a>( - op: TokenRef<'input, 'a>, - tail: Expression<'input, 'a>, -) -> Result<'a, Expression<'input, 'a>> { +fn make_unary_op<'a>(op: TokenRef<'a>, tail: Expression<'a>) -> Result<'a, Expression<'a>> { let operator = make_unary_operator(op)?; - Ok(Expression::UnaryOperation(Box::new(UnaryOperation { + Ok(Expression::UnaryOperation(UnaryOperation { operator, expression: Box::new(tail), lpar: vec![], rpar: vec![], - }))) + })) } -fn make_unary_operator<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, UnaryOp<'input, 'a>> { +fn make_unary_operator(tok: TokenRef) -> Result { + let whitespace_after = Default::default(); match tok.string { - "+" => Ok(UnaryOp::Plus { tok }), - "-" => Ok(UnaryOp::Minus { tok }), - "~" => Ok(UnaryOp::BitInvert { tok }), - "not" => Ok(UnaryOp::Not { tok }), + "+" => Ok(UnaryOp::Plus { + whitespace_after, + tok, + }), + "-" => Ok(UnaryOp::Minus { + whitespace_after, + tok, + }), + "~" => Ok(UnaryOp::BitInvert { + whitespace_after, + tok, + }), + "not" => Ok(UnaryOp::Not { + whitespace_after, + tok, + }), _ => Err(ParserError::OperatorError), } } -fn make_number<'input, 'a>(num: TokenRef<'input, 'a>) -> Expression<'input, 'a> { +fn make_number(num: TokenRef) -> Expression { super::numbers::parse_number(num.string) } -fn make_indented_block<'input, 'a>( - nl: TokenRef<'input, 'a>, - indent: TokenRef<'input, 'a>, - statements: Vec>, - dedent: TokenRef<'input, 'a>, -) -> Suite<'input, 'a> { +fn make_indented_block<'a>( + nl: TokenRef<'a>, + indent: TokenRef<'a>, + statements: Vec>, + dedent: TokenRef<'a>, +) -> Suite<'a> { Suite::IndentedBlock(IndentedBlock { body: statements, + header: Default::default(), indent: Default::default(), + footer: Default::default(), newline_tok: nl, indent_tok: indent, dedent_tok: dedent, }) } -struct SimpleStatementParts<'input, 'a> { - first_tok: TokenRef<'input, 'a>, // The first token of the first statement. Used for its whitespace - first_statement: SmallStatement<'input, 'a>, - rest: Vec<(TokenRef<'input, 'a>, SmallStatement<'input, 'a>)>, // semicolon, statement pairs - last_semi: Option>, - nl: TokenRef<'input, 'a>, +struct SimpleStatementParts<'a> { + first_tok: TokenRef<'a>, // The first token of the first statement. 
Used for its whitespace + first_statement: SmallStatement<'a>, + rest: Vec<(TokenRef<'a>, SmallStatement<'a>)>, // semicolon, statement pairs + last_semi: Option>, + nl: TokenRef<'a>, } -fn make_semicolon<'input, 'a>(tok: TokenRef<'input, 'a>) -> Semicolon<'input, 'a> { - Semicolon { tok } +fn make_semicolon(tok: TokenRef) -> Semicolon { + Semicolon { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } } -fn _make_simple_statement<'input, 'a>( - parts: SimpleStatementParts<'input, 'a>, -) -> ( - TokenRef<'input, 'a>, - Vec>, - TokenRef<'input, 'a>, -) { +fn _make_simple_statement( + parts: SimpleStatementParts, +) -> (TokenRef, Vec, TokenRef) { let mut body = vec![]; let mut current = parts.first_statement; @@ -1822,39 +1841,42 @@ fn _make_simple_statement<'input, 'a>( (parts.first_tok, body, parts.nl) } -fn make_simple_statement_suite<'input, 'a>( - parts: SimpleStatementParts<'input, 'a>, -) -> Suite<'input, 'a> { +fn make_simple_statement_suite(parts: SimpleStatementParts) -> Suite { let (first_tok, body, newline_tok) = _make_simple_statement(parts); Suite::SimpleStatementSuite(SimpleStatementSuite { body, + leading_whitespace: Default::default(), + trailing_whitespace: Default::default(), first_tok, newline_tok, }) } -fn make_simple_statement_line<'input, 'a>( - parts: SimpleStatementParts<'input, 'a>, -) -> SimpleStatementLine<'input, 'a> { +fn make_simple_statement_line(parts: SimpleStatementParts) -> SimpleStatementLine { let (first_tok, body, newline_tok) = _make_simple_statement(parts); SimpleStatementLine { body, + leading_lines: Default::default(), + trailing_whitespace: Default::default(), first_tok, newline_tok, } } -fn make_if<'input, 'a>( - if_tok: TokenRef<'input, 'a>, - cond: Expression<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - block: Suite<'input, 'a>, - orelse: Option>, +fn make_if<'a>( + if_tok: TokenRef<'a>, + cond: Expression<'a>, + colon_tok: TokenRef<'a>, + block: Suite<'a>, + orelse: Option>, is_elif: bool, -) -> If<'input, 'a> { +) -> If<'a> { If { + leading_lines: Default::default(), + whitespace_before_test: Default::default(), test: cond, + whitespace_after_test: Default::default(), body: block, orelse: orelse.map(Box::new), is_elif, @@ -1863,29 +1885,23 @@ fn make_if<'input, 'a>( } } -fn make_else<'input, 'a>( - else_tok: TokenRef<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - block: Suite<'input, 'a>, -) -> Else<'input, 'a> { +fn make_else<'a>(else_tok: TokenRef<'a>, colon_tok: TokenRef<'a>, block: Suite<'a>) -> Else<'a> { Else { + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), body: block, else_tok, colon_tok, } } -struct StarEtc<'input, 'a>( - Option>, - Vec>, - Option>, -); +struct StarEtc<'a>(Option>, Vec>, Option>); -fn make_parameters<'input, 'a>( - posonly: Option<(Vec>, ParamSlash<'input, 'a>)>, - params: Vec>, - star_etc: Option>, -) -> Parameters<'input, 'a> { +fn make_parameters<'a>( + posonly: Option<(Vec>, ParamSlash<'a>)>, + params: Vec>, + star_etc: Option>, +) -> Parameters<'a> { let (posonly_params, posonly_ind) = match posonly { Some((a, b)) => (a, Some(b)), None => (vec![], None), @@ -1904,11 +1920,11 @@ fn make_parameters<'input, 'a>( } } -fn add_param_default<'input, 'a>( - param: Param<'input, 'a>, - def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>, - comma_tok: Option>, -) -> Param<'input, 'a> { +fn add_param_default<'a>( + param: Param<'a>, + def: Option<(AssignEqual<'a>, Expression<'a>)>, + comma_tok: Option>, +) -> Param<'a> { let 
comma = comma_tok.map(make_comma); let (equal, default) = match def { @@ -1923,10 +1939,7 @@ fn add_param_default<'input, 'a>( } } -fn add_param_star<'input, 'a>( - param: Param<'input, 'a>, - star: TokenRef<'input, 'a>, -) -> Param<'input, 'a> { +fn add_param_star<'a>(param: Param<'a>, star: TokenRef<'a>) -> Param<'a> { let str = star.string; Param { star: Some(str), @@ -1935,78 +1948,92 @@ fn add_param_star<'input, 'a>( } } -fn make_assign_equal<'input, 'a>(tok: TokenRef<'input, 'a>) -> AssignEqual<'input, 'a> { - AssignEqual { tok } +fn make_assign_equal(tok: TokenRef) -> AssignEqual { + AssignEqual { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } } -fn make_comma<'input, 'a>(tok: TokenRef<'input, 'a>) -> Comma<'input, 'a> { - Comma { tok } +fn make_comma(tok: TokenRef) -> Comma { + Comma { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } } fn concat(a: Vec, b: Vec) -> Vec { a.into_iter().chain(b.into_iter()).collect() } -fn make_name_or_attr<'input, 'a>( - first_tok: Name<'input, 'a>, - mut tail: Vec<(TokenRef<'input, 'a>, Name<'input, 'a>)>, -) -> NameOrAttribute<'input, 'a> { +fn make_name_or_attr<'a>( + first_tok: Name<'a>, + mut tail: Vec<(TokenRef<'a>, Name<'a>)>, +) -> NameOrAttribute<'a> { if let Some((dot, name)) = tail.pop() { let dot = make_dot(dot); - return NameOrAttribute::A(Box::new(Attribute { + return NameOrAttribute::A(Attribute { attr: name, dot, lpar: Default::default(), rpar: Default::default(), value: Box::new(make_name_or_attr(first_tok, tail).into()), - })); + }); } else { - NameOrAttribute::N(Box::new(first_tok)) + NameOrAttribute::N(first_tok) } } -fn make_name<'input, 'a>(tok: TokenRef<'input, 'a>) -> Name<'input, 'a> { +fn make_name(tok: TokenRef) -> Name { Name { value: tok.string, ..Default::default() } } -fn make_dot<'input, 'a>(tok: TokenRef<'input, 'a>) -> Dot<'input, 'a> { - Dot { tok } +fn make_dot(tok: TokenRef) -> Dot { + Dot { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } } -fn make_import_alias<'input, 'a>( - name: NameOrAttribute<'input, 'a>, - asname: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, -) -> ImportAlias<'input, 'a> { +fn make_import_alias<'a>( + name: NameOrAttribute<'a>, + asname: Option<(TokenRef<'a>, Name<'a>)>, +) -> ImportAlias<'a> { ImportAlias { name, - asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))), + asname: asname.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))), comma: None, } } -fn make_as_name<'input, 'a>( - as_tok: TokenRef<'input, 'a>, - name: AssignTargetExpression<'input, 'a>, -) -> AsName<'input, 'a> { - AsName { name, as_tok } +fn make_as_name<'a>(as_tok: TokenRef<'a>, name: AssignTargetExpression<'a>) -> AsName<'a> { + AsName { + name, + whitespace_before_as: Default::default(), + whitespace_after_as: Default::default(), + as_tok, + } } -type ParenthesizedImportNames<'input, 'a> = ( - Option>, - ImportNames<'input, 'a>, - Option>, +type ParenthesizedImportNames<'a> = ( + Option>, + ImportNames<'a>, + Option>, ); -fn make_import_from<'input, 'a>( - from_tok: TokenRef<'input, 'a>, - dots: Vec>, - module: Option>, - import_tok: TokenRef<'input, 'a>, - aliases: ParenthesizedImportNames<'input, 'a>, -) -> ImportFrom<'input, 'a> { +fn make_import_from<'a>( + from_tok: TokenRef<'a>, + dots: Vec>, + module: Option>, + import_tok: TokenRef<'a>, + aliases: ParenthesizedImportNames<'a>, +) -> ImportFrom<'a> { let (lpar, names, rpar) = 
aliases; ImportFrom { @@ -2016,26 +2043,27 @@ fn make_import_from<'input, 'a>( lpar, rpar, semicolon: None, + whitespace_after_from: Default::default(), + whitespace_after_import: Default::default(), + whitespace_before_import: Default::default(), from_tok, import_tok, } } -fn make_import<'input, 'a>( - import_tok: TokenRef<'input, 'a>, - names: Vec>, -) -> Import<'input, 'a> { +fn make_import<'a>(import_tok: TokenRef<'a>, names: Vec>) -> Import<'a> { Import { names, + whitespace_after_import: Default::default(), semicolon: None, import_tok, } } -fn make_import_from_as_names<'input, 'a>( - first: ImportAlias<'input, 'a>, - tail: Vec<(Comma<'input, 'a>, ImportAlias<'input, 'a>)>, -) -> Vec> { +fn make_import_from_as_names<'a>( + first: ImportAlias<'a>, + tail: Vec<(Comma<'a>, ImportAlias<'a>)>, +) -> Vec> { let mut ret = vec![]; let mut cur = first; for (comma, alias) in tail { @@ -2046,21 +2074,25 @@ fn make_import_from_as_names<'input, 'a>( ret } -fn make_lpar<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftParen<'input, 'a> { - LeftParen { lpar_tok: tok } +fn make_lpar(tok: TokenRef) -> LeftParen { + LeftParen { + whitespace_after: Default::default(), + lpar_tok: tok, + } } -fn make_rpar<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightParen<'input, 'a> { - RightParen { rpar_tok: tok } +fn make_rpar(tok: TokenRef) -> RightParen { + RightParen { + whitespace_before: Default::default(), + rpar_tok: tok, + } } -fn make_module<'input, 'a>( - body: Vec>, - tok: TokenRef<'input, 'a>, - encoding: &str, -) -> Module<'input, 'a> { +fn make_module<'a>(body: Vec>, tok: TokenRef<'a>, encoding: &str) -> Module<'a> { Module { body, + header: Default::default(), + footer: Default::default(), eof_tok: tok, default_indent: " ", default_newline: "\n", @@ -2069,11 +2101,7 @@ fn make_module<'input, 'a>( } } -fn make_attribute<'input, 'a>( - value: Expression<'input, 'a>, - dot: TokenRef<'input, 'a>, - attr: Name<'input, 'a>, -) -> Attribute<'input, 'a> { +fn make_attribute<'a>(value: Expression<'a>, dot: TokenRef<'a>, attr: Name<'a>) -> Attribute<'a> { let dot = make_dot(dot); Attribute { attr, @@ -2084,16 +2112,14 @@ fn make_attribute<'input, 'a>( } } -fn make_starred_element<'input, 'a>( - star_tok: TokenRef<'input, 'a>, - rest: Element<'input, 'a>, -) -> StarredElement<'input, 'a> { +fn make_starred_element<'a>(star_tok: TokenRef<'a>, rest: Element<'a>) -> StarredElement<'a> { let value = match rest { Element::Simple { value, .. 
} => value, _ => panic!("Internal error while making starred element"), }; StarredElement { value: Box::new(value), + whitespace_before_value: Default::default(), lpar: Default::default(), rpar: Default::default(), comma: Default::default(), @@ -2101,9 +2127,7 @@ fn make_starred_element<'input, 'a>( } } -fn assign_target_to_element<'input, 'a>( - expr: AssignTargetExpression<'input, 'a>, -) -> Element<'input, 'a> { +fn assign_target_to_element(expr: AssignTargetExpression) -> Element { match expr { AssignTargetExpression::Attribute(a) => Element::Simple { value: Expression::Attribute(a), @@ -2129,13 +2153,18 @@ fn assign_target_to_element<'input, 'a>( } } -fn make_assignment<'input, 'a>( - lhs: Vec<(AssignTargetExpression<'input, 'a>, TokenRef<'input, 'a>)>, - rhs: Expression<'input, 'a>, -) -> Assign<'input, 'a> { +fn make_assignment<'a>( + lhs: Vec<(AssignTargetExpression<'a>, TokenRef<'a>)>, + rhs: Expression<'a>, +) -> Assign<'a> { let mut targets = vec![]; for (target, equal_tok) in lhs { - targets.push(AssignTarget { target, equal_tok }); + targets.push(AssignTarget { + target, + whitespace_before_equal: Default::default(), + whitespace_after_equal: Default::default(), + equal_tok, + }); } Assign { targets, @@ -2144,23 +2173,20 @@ fn make_assignment<'input, 'a>( } } -fn expr_to_element<'input, 'a>(expr: Expression<'input, 'a>) -> Element<'input, 'a> { - match expr { - Expression::StarredElement(inner_expr) => Element::Starred(inner_expr), - _ => Element::Simple { - value: expr, - comma: Default::default(), - }, +fn expr_to_element(expr: Expression) -> Element { + Element::Simple { + value: expr, + comma: Default::default(), } } -fn make_tuple<'input, 'a>( - first: Element<'input, 'a>, - rest: Vec<(Comma<'input, 'a>, Element<'input, 'a>)>, - trailing_comma: Option>, - lpar: Option>, - rpar: Option>, -) -> Tuple<'input, 'a> { +fn make_tuple<'a>( + first: Element<'a>, + rest: Vec<(Comma<'a>, Element<'a>)>, + trailing_comma: Option>, + lpar: Option>, + rpar: Option>, +) -> Tuple<'a> { let elements = comma_separate(first, rest, trailing_comma); let lpar = lpar.map(|l| vec![l]).unwrap_or_default(); @@ -2173,10 +2199,7 @@ fn make_tuple<'input, 'a>( } } -fn make_tuple_from_elements<'input, 'a>( - first: Element<'input, 'a>, - mut rest: Vec>, -) -> Tuple<'input, 'a> { +fn make_tuple_from_elements<'a>(first: Element<'a>, mut rest: Vec>) -> Tuple<'a> { rest.insert(0, first); Tuple { elements: rest, @@ -2185,11 +2208,7 @@ fn make_tuple_from_elements<'input, 'a>( } } -fn make_kwarg<'input, 'a>( - name: Name<'input, 'a>, - eq: TokenRef<'input, 'a>, - value: Expression<'input, 'a>, -) -> Arg<'input, 'a> { +fn make_kwarg<'a>(name: Name<'a>, eq: TokenRef<'a>, value: Expression<'a>) -> Arg<'a> { let equal = Some(make_assign_equal(eq)); let keyword = Some(name); Arg { @@ -2198,14 +2217,13 @@ fn make_kwarg<'input, 'a>( equal, comma: None, star: "", + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), star_tok: None, } } -fn make_star_arg<'input, 'a>( - star: TokenRef<'input, 'a>, - expr: Expression<'input, 'a>, -) -> Arg<'input, 'a> { +fn make_star_arg<'a>(star: TokenRef<'a>, expr: Expression<'a>) -> Arg<'a> { let str = star.string; Arg { value: expr, @@ -2213,16 +2231,18 @@ fn make_star_arg<'input, 'a>( equal: None, comma: None, star: str, + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), star_tok: Some(star), } } -fn make_call<'input, 'a>( - func: Expression<'input, 'a>, - lpar_tok: TokenRef<'input, 'a>, - args: Vec>, - 
rpar_tok: TokenRef<'input, 'a>, -) -> Call<'input, 'a> { +fn make_call<'a>( + func: Expression<'a>, + lpar_tok: TokenRef<'a>, + args: Vec>, + rpar_tok: TokenRef<'a>, +) -> Call<'a> { let lpar = vec![]; let rpar = vec![]; let func = Box::new(func); @@ -2232,15 +2252,14 @@ fn make_call<'input, 'a>( args, lpar, rpar, + whitespace_after_func: Default::default(), + whitespace_before_args: Default::default(), lpar_tok, rpar_tok, } } -fn make_genexp_call<'input, 'a>( - func: Expression<'input, 'a>, - mut genexp: GeneratorExp<'input, 'a>, -) -> Call<'input, 'a> { +fn make_genexp_call<'a>(func: Expression<'a>, mut genexp: GeneratorExp<'a>) -> Call<'a> { // func ( (genexp) ) // ^ // lpar_tok @@ -2258,48 +2277,58 @@ fn make_genexp_call<'input, 'a>( Call { func: Box::new(func), args: vec![Arg { - value: Expression::GeneratorExp(Box::new(genexp)), + value: Expression::GeneratorExp(genexp), keyword: None, equal: None, comma: None, star: "", + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), star_tok: None, }], lpar: vec![], rpar: vec![], + whitespace_after_func: Default::default(), + whitespace_before_args: Default::default(), lpar_tok, rpar_tok, } } -fn make_arg<'input, 'a>(expr: Expression<'input, 'a>) -> Arg<'input, 'a> { +fn make_arg(expr: Expression) -> Arg { Arg { value: expr, keyword: Default::default(), equal: Default::default(), comma: Default::default(), star: Default::default(), + whitespace_after_star: Default::default(), + whitespace_after_arg: Default::default(), star_tok: None, } } -fn make_comp_if<'input, 'a>( - if_tok: TokenRef<'input, 'a>, - test: Expression<'input, 'a>, -) -> CompIf<'input, 'a> { - CompIf { test, if_tok } +fn make_comp_if<'a>(if_tok: TokenRef<'a>, test: Expression<'a>) -> CompIf<'a> { + CompIf { + test, + whitespace_before: Default::default(), + whitespace_before_test: Default::default(), + if_tok, + } } -fn make_for_if<'input, 'a>( - async_tok: Option>, - for_tok: TokenRef<'input, 'a>, - target: AssignTargetExpression<'input, 'a>, - in_tok: TokenRef<'input, 'a>, - iter: Expression<'input, 'a>, - ifs: Vec>, -) -> CompFor<'input, 'a> { +fn make_for_if<'a>( + async_tok: Option>, + for_tok: TokenRef<'a>, + target: AssignTargetExpression<'a>, + in_tok: TokenRef<'a>, + iter: Expression<'a>, + ifs: Vec>, +) -> CompFor<'a> { let inner_for_in = None; - let asynchronous = async_tok.as_ref().map(|_| make_async()); + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); CompFor { target, @@ -2307,16 +2336,17 @@ fn make_for_if<'input, 'a>( ifs, inner_for_in, asynchronous, + whitespace_before: Default::default(), + whitespace_after_for: Default::default(), + whitespace_before_in: Default::default(), + whitespace_after_in: Default::default(), async_tok, for_tok, in_tok, } } -fn make_bare_genexp<'input, 'a>( - elt: Expression<'input, 'a>, - for_in: CompFor<'input, 'a>, -) -> GeneratorExp<'input, 'a> { +fn make_bare_genexp<'a>(elt: Expression<'a>, for_in: CompFor<'a>) -> GeneratorExp<'a> { GeneratorExp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2325,43 +2355,50 @@ fn make_bare_genexp<'input, 'a>( } } -fn merge_comp_fors<'input, 'a>( - comp_fors: Vec>, -) -> GrammarResult> { - if comp_fors.len() > MAX_RECURSION_DEPTH { - return Err("shallower comprehension"); - } +fn merge_comp_fors(comp_fors: Vec) -> CompFor { let mut it = comp_fors.into_iter().rev(); let first = it.next().expect("cant merge empty comp_fors"); - Ok(it.fold(first, |acc, curr| CompFor { + it.fold(first, |acc, curr| 
CompFor { inner_for_in: Some(Box::new(acc)), ..curr - })) + }) } -fn make_left_bracket<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftSquareBracket<'input, 'a> { - LeftSquareBracket { tok } +fn make_left_bracket(tok: TokenRef) -> LeftSquareBracket { + LeftSquareBracket { + whitespace_after: Default::default(), + tok, + } } -fn make_right_bracket<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightSquareBracket<'input, 'a> { - RightSquareBracket { tok } +fn make_right_bracket(tok: TokenRef) -> RightSquareBracket { + RightSquareBracket { + whitespace_before: Default::default(), + tok, + } } -fn make_left_brace<'input, 'a>(tok: TokenRef<'input, 'a>) -> LeftCurlyBrace<'input, 'a> { - LeftCurlyBrace { tok } +fn make_left_brace(tok: TokenRef) -> LeftCurlyBrace { + LeftCurlyBrace { + whitespace_after: Default::default(), + tok, + } } -fn make_right_brace<'input, 'a>(tok: TokenRef<'input, 'a>) -> RightCurlyBrace<'input, 'a> { - RightCurlyBrace { tok } +fn make_right_brace(tok: TokenRef) -> RightCurlyBrace { + RightCurlyBrace { + whitespace_before: Default::default(), + tok, + } } -fn make_list_comp<'input, 'a>( - lbracket: LeftSquareBracket<'input, 'a>, - elt: Expression<'input, 'a>, - for_in: CompFor<'input, 'a>, - rbracket: RightSquareBracket<'input, 'a>, -) -> ListComp<'input, 'a> { +fn make_list_comp<'a>( + lbracket: LeftSquareBracket<'a>, + elt: Expression<'a>, + for_in: CompFor<'a>, + rbracket: RightSquareBracket<'a>, +) -> ListComp<'a> { ListComp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2372,12 +2409,12 @@ fn make_list_comp<'input, 'a>( } } -fn make_set_comp<'input, 'a>( - lbrace: LeftCurlyBrace<'input, 'a>, - elt: Expression<'input, 'a>, - for_in: CompFor<'input, 'a>, - rbrace: RightCurlyBrace<'input, 'a>, -) -> SetComp<'input, 'a> { +fn make_set_comp<'a>( + lbrace: LeftCurlyBrace<'a>, + elt: Expression<'a>, + for_in: CompFor<'a>, + rbrace: RightCurlyBrace<'a>, +) -> SetComp<'a> { SetComp { elt: Box::new(elt), for_in: Box::new(for_in), @@ -2388,16 +2425,12 @@ fn make_set_comp<'input, 'a>( } } -fn make_dict_comp<'input, 'a>( - lbrace: LeftCurlyBrace<'input, 'a>, - kvpair: ( - Expression<'input, 'a>, - TokenRef<'input, 'a>, - Expression<'input, 'a>, - ), - for_in: CompFor<'input, 'a>, - rbrace: RightCurlyBrace<'input, 'a>, -) -> DictComp<'input, 'a> { +fn make_dict_comp<'a>( + lbrace: LeftCurlyBrace<'a>, + kvpair: (Expression<'a>, TokenRef<'a>, Expression<'a>), + for_in: CompFor<'a>, + rbrace: RightCurlyBrace<'a>, +) -> DictComp<'a> { let (key, colon_tok, value) = kvpair; DictComp { @@ -2408,15 +2441,17 @@ fn make_dict_comp<'input, 'a>( rbrace, lpar: vec![], rpar: vec![], + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), colon_tok, } } -fn make_list<'input, 'a>( - lbracket: LeftSquareBracket<'input, 'a>, - elements: Vec>, - rbracket: RightSquareBracket<'input, 'a>, -) -> List<'input, 'a> { +fn make_list<'a>( + lbracket: LeftSquareBracket<'a>, + elements: Vec>, + rbracket: RightSquareBracket<'a>, +) -> List<'a> { List { elements, lbracket, @@ -2426,11 +2461,11 @@ fn make_list<'input, 'a>( } } -fn make_set<'input, 'a>( - lbrace: LeftCurlyBrace<'input, 'a>, - elements: Vec>, - rbrace: RightCurlyBrace<'input, 'a>, -) -> Set<'input, 'a> { +fn make_set<'a>( + lbrace: LeftCurlyBrace<'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'a>, +) -> Set<'a> { Set { elements, lbrace, @@ -2440,13 +2475,13 @@ fn make_set<'input, 'a>( } } -fn comma_separate<'input, 'a, T>( +fn comma_separate<'a, T>( first: T, - rest: Vec<(Comma<'input, 'a>, T)>, - trailing_comma: 
Option>, + rest: Vec<(Comma<'a>, T)>, + trailing_comma: Option>, ) -> Vec where - T: WithComma<'input, 'a>, + T: WithComma<'a>, { let mut elements = vec![]; let mut current = first; @@ -2461,11 +2496,11 @@ where elements } -fn make_dict<'input, 'a>( - lbrace: LeftCurlyBrace<'input, 'a>, - elements: Vec>, - rbrace: RightCurlyBrace<'input, 'a>, -) -> Dict<'input, 'a> { +fn make_dict<'a>( + lbrace: LeftCurlyBrace<'a>, + elements: Vec>, + rbrace: RightCurlyBrace<'a>, +) -> Dict<'a> { Dict { elements, lbrace, @@ -2475,97 +2510,90 @@ fn make_dict<'input, 'a>( } } -fn make_double_starred_keypairs<'input, 'a>( - first: DictElement<'input, 'a>, - rest: Vec<(Comma<'input, 'a>, DictElement<'input, 'a>)>, - trailing_comma: Option>, -) -> Vec> { +fn make_double_starred_keypairs<'a>( + first: DictElement<'a>, + rest: Vec<(Comma<'a>, DictElement<'a>)>, + trailing_comma: Option>, +) -> Vec> { let mut elements = vec![]; let mut current = first; for (comma, next) in rest { elements.push(current.with_comma(comma)); current = next; } - if let Some(comma) = trailing_comma { + if let Some(mut comma) = trailing_comma { + // don't consume trailing whitespace for trailing comma + comma.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace("")); current = current.with_comma(comma); } elements.push(current); elements } -fn make_dict_element<'input, 'a>( - el: ( - Expression<'input, 'a>, - TokenRef<'input, 'a>, - Expression<'input, 'a>, - ), -) -> DictElement<'input, 'a> { +fn make_dict_element<'a>(el: (Expression<'a>, TokenRef<'a>, Expression<'a>)) -> DictElement<'a> { let (key, colon_tok, value) = el; DictElement::Simple { key, value, comma: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), colon_tok, } } -fn make_double_starred_element<'input, 'a>( - star_tok: TokenRef<'input, 'a>, - value: Expression<'input, 'a>, -) -> StarredDictElement<'input, 'a> { +fn make_double_starred_element<'a>( + star_tok: TokenRef<'a>, + value: Expression<'a>, +) -> StarredDictElement<'a> { StarredDictElement { value, comma: Default::default(), + whitespace_before_value: Default::default(), star_tok, } } -fn make_index<'input, 'a>(value: Expression<'input, 'a>) -> BaseSlice<'input, 'a> { - BaseSlice::Index(Box::new(Index { - value, - star: None, - star_tok: None, - })) +fn make_index(value: Expression) -> BaseSlice { + BaseSlice::Index(Index { value }) } -fn make_index_from_arg<'input, 'a>(arg: Arg<'input, 'a>) -> BaseSlice<'input, 'a> { - BaseSlice::Index(Box::new(Index { - value: arg.value, - star: Some(arg.star), - star_tok: arg.star_tok, - })) +fn make_colon(tok: TokenRef) -> Colon { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + Colon { + whitespace_before, + whitespace_after, + tok, + } } -fn make_colon<'input, 'a>(tok: TokenRef<'input, 'a>) -> Colon<'input, 'a> { - Colon { tok } -} - -fn make_slice<'input, 'a>( - lower: Option>, - first_colon: TokenRef<'input, 'a>, - upper: Option>, - rest: Option<(TokenRef<'input, 'a>, Option>)>, -) -> BaseSlice<'input, 'a> { +fn make_slice<'a>( + lower: Option>, + first_colon: TokenRef<'a>, + upper: Option>, + rest: Option<(TokenRef<'a>, Option>)>, +) -> BaseSlice<'a> { let first_colon = make_colon(first_colon); let (second_colon, step) = if let Some((tok, step)) = rest { (Some(make_colon(tok)), step) } else { (None, None) }; - BaseSlice::Slice(Box::new(Slice { + BaseSlice::Slice(Slice { lower, upper, step, first_colon, second_colon, - })) + }) } -fn 
make_slices<'input, 'a>( - first: BaseSlice<'input, 'a>, - rest: Vec<(Comma<'input, 'a>, BaseSlice<'input, 'a>)>, - trailing_comma: Option>, -) -> Vec> { +fn make_slices<'a>( + first: BaseSlice<'a>, + rest: Vec<(Comma<'a>, BaseSlice<'a>)>, + trailing_comma: Option>, +) -> Vec> { let mut elements = vec![]; let mut current = first; for (comma, next) in rest { @@ -2582,12 +2610,13 @@ fn make_slices<'input, 'a>( elements } -fn make_subscript<'input, 'a>( - value: Expression<'input, 'a>, - lbracket: LeftSquareBracket<'input, 'a>, - slice: Vec>, - rbracket: RightSquareBracket<'input, 'a>, -) -> Subscript<'input, 'a> { +fn make_subscript<'a>( + value: Expression<'a>, + lbracket: LeftSquareBracket<'a>, + slice: Vec>, + rbracket: RightSquareBracket<'a>, +) -> Subscript<'a> { + let lbracket_tok = lbracket.tok.clone(); Subscript { value: Box::new(value), slice, @@ -2595,31 +2624,37 @@ fn make_subscript<'input, 'a>( rbracket, lpar: Default::default(), rpar: Default::default(), + whitespace_after_value: Default::default(), + lbracket_tok, } } -fn make_ifexp<'input, 'a>( - body: Expression<'input, 'a>, - if_tok: TokenRef<'input, 'a>, - test: Expression<'input, 'a>, - else_tok: TokenRef<'input, 'a>, - orelse: Expression<'input, 'a>, -) -> IfExp<'input, 'a> { +fn make_ifexp<'a>( + body: Expression<'a>, + if_tok: TokenRef<'a>, + test: Expression<'a>, + else_tok: TokenRef<'a>, + orelse: Expression<'a>, +) -> IfExp<'a> { IfExp { test: Box::new(test), body: Box::new(body), orelse: Box::new(orelse), lpar: Default::default(), rpar: Default::default(), + whitespace_before_if: Default::default(), + whitespace_after_if: Default::default(), + whitespace_before_else: Default::default(), + whitespace_after_else: Default::default(), if_tok, else_tok, } } -fn add_arguments_trailing_comma<'input, 'a>( - mut args: Vec>, - trailing_comma: Option>, -) -> Vec> { +fn add_arguments_trailing_comma<'a>( + mut args: Vec>, + trailing_comma: Option>, +) -> Vec> { if let Some(comma) = trailing_comma { let last = args.pop().unwrap(); args.push(last.with_comma(comma)); @@ -2627,12 +2662,12 @@ fn add_arguments_trailing_comma<'input, 'a>( args } -fn make_lambda<'input, 'a>( - lambda_tok: TokenRef<'input, 'a>, - params: Parameters<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - expr: Expression<'input, 'a>, -) -> Lambda<'input, 'a> { +fn make_lambda<'a>( + lambda_tok: TokenRef<'a>, + params: Parameters<'a>, + colon_tok: TokenRef<'a>, + expr: Expression<'a>, +) -> Lambda<'a> { let colon = make_colon(colon_tok); Lambda { params: Box::new(params), @@ -2640,26 +2675,26 @@ fn make_lambda<'input, 'a>( colon, lpar: Default::default(), rpar: Default::default(), + whitespace_after_lambda: Default::default(), lambda_tok, } } -fn make_annotation<'input, 'a>( - tok: TokenRef<'input, 'a>, - ann: Expression<'input, 'a>, -) -> Annotation<'input, 'a> { +fn make_annotation<'a>(tok: TokenRef<'a>, ann: Expression<'a>) -> Annotation<'a> { Annotation { annotation: ann, + whitespace_before_indicator: Default::default(), + whitespace_after_indicator: Default::default(), tok, } } -fn make_ann_assignment<'input, 'a>( - target: AssignTargetExpression<'input, 'a>, - col: TokenRef<'input, 'a>, - ann: Expression<'input, 'a>, - rhs: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, -) -> AnnAssign<'input, 'a> { +fn make_ann_assignment<'a>( + target: AssignTargetExpression<'a>, + col: TokenRef<'a>, + ann: Expression<'a>, + rhs: Option<(TokenRef<'a>, Expression<'a>)>, +) -> AnnAssign<'a> { let annotation = make_annotation(col, ann); let (eq, value) = 
rhs.map(|(x, y)| (Some(x), Some(y))).unwrap_or((None, None)); let equal = eq.map(make_assign_equal); @@ -2672,45 +2707,49 @@ fn make_ann_assignment<'input, 'a>( } } -fn make_yield<'input, 'a>( - yield_tok: TokenRef<'input, 'a>, - f: Option>, - e: Option>, -) -> Yield<'input, 'a> { +fn make_yield<'a>( + yield_tok: TokenRef<'a>, + f: Option>, + e: Option>, +) -> Yield<'a> { let value = match (f, e) { (None, None) => None, - (Some(f), Some(e)) => Some(YieldValue::From(Box::new(make_from(f, e)))), - (None, Some(e)) => Some(YieldValue::Expression(Box::new(e))), + (Some(f), Some(e)) => Some(YieldValue::From(make_from(f, e))), + (None, Some(e)) => Some(YieldValue::Expression(e)), _ => panic!("yield from without expression"), }; Yield { value: value.map(Box::new), lpar: Default::default(), rpar: Default::default(), + whitespace_after_yield: Default::default(), yield_tok, } } -fn make_from<'input, 'a>(tok: TokenRef<'input, 'a>, e: Expression<'input, 'a>) -> From<'input, 'a> { - From { item: e, tok } +fn make_from<'a>(tok: TokenRef<'a>, e: Expression<'a>) -> From<'a> { + From { + item: e, + whitespace_before_from: Default::default(), + whitespace_after_from: Default::default(), + tok, + } } -fn make_return<'input, 'a>( - return_tok: TokenRef<'input, 'a>, - value: Option>, -) -> Return<'input, 'a> { +fn make_return<'a>(return_tok: TokenRef<'a>, value: Option>) -> Return<'a> { Return { value, + whitespace_after_return: Default::default(), semicolon: Default::default(), return_tok, } } -fn make_assert<'input, 'a>( - assert_tok: TokenRef<'input, 'a>, - test: Expression<'input, 'a>, - rest: Option<(Comma<'input, 'a>, Expression<'input, 'a>)>, -) -> Assert<'input, 'a> { +fn make_assert<'a>( + assert_tok: TokenRef<'a>, + test: Expression<'a>, + rest: Option<(Comma<'a>, Expression<'a>)>, +) -> Assert<'a> { let (comma, msg) = if let Some((c, msg)) = rest { (Some(c), Some(msg)) } else { @@ -2721,32 +2760,34 @@ fn make_assert<'input, 'a>( test, msg, comma, + whitespace_after_assert: Default::default(), semicolon: Default::default(), assert_tok, } } -fn make_raise<'input, 'a>( - raise_tok: TokenRef<'input, 'a>, - exc: Option>, - rest: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, -) -> Raise<'input, 'a> { +fn make_raise<'a>( + raise_tok: TokenRef<'a>, + exc: Option>, + rest: Option<(TokenRef<'a>, Expression<'a>)>, +) -> Raise<'a> { let cause = rest.map(|(t, e)| make_from(t, e)); Raise { exc, cause, + whitespace_after_raise: Default::default(), semicolon: Default::default(), raise_tok, } } -fn make_global<'input, 'a>( - tok: TokenRef<'input, 'a>, - init: Vec<(Name<'input, 'a>, Comma<'input, 'a>)>, - last: Name<'input, 'a>, -) -> Global<'input, 'a> { - let mut names: Vec> = init +fn make_global<'a>( + tok: TokenRef<'a>, + init: Vec<(Name<'a>, Comma<'a>)>, + last: Name<'a>, +) -> Global<'a> { + let mut names: Vec> = init .into_iter() .map(|(name, c)| NameItem { name, @@ -2759,17 +2800,18 @@ fn make_global<'input, 'a>( }); Global { names, + whitespace_after_global: Default::default(), semicolon: Default::default(), tok, } } -fn make_nonlocal<'input, 'a>( - tok: TokenRef<'input, 'a>, - init: Vec<(Name<'input, 'a>, Comma<'input, 'a>)>, - last: Name<'input, 'a>, -) -> Nonlocal<'input, 'a> { - let mut names: Vec> = init +fn make_nonlocal<'a>( + tok: TokenRef<'a>, + init: Vec<(Name<'a>, Comma<'a>)>, + last: Name<'a>, +) -> Nonlocal<'a> { + let mut names: Vec> = init .into_iter() .map(|(name, c)| NameItem { name, @@ -2782,23 +2824,26 @@ fn make_nonlocal<'input, 'a>( }); Nonlocal { names, + 
whitespace_after_nonlocal: Default::default(), semicolon: Default::default(), tok, } } #[allow(clippy::too_many_arguments)] -fn make_for<'input, 'a>( - async_tok: Option>, - for_tok: TokenRef<'input, 'a>, - target: AssignTargetExpression<'input, 'a>, - in_tok: TokenRef<'input, 'a>, - iter: Expression<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, - orelse: Option>, -) -> For<'input, 'a> { - let asynchronous = async_tok.as_ref().map(|_| make_async()); +fn make_for<'a>( + async_tok: Option>, + for_tok: TokenRef<'a>, + target: AssignTargetExpression<'a>, + in_tok: TokenRef<'a>, + iter: Expression<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, + orelse: Option>, +) -> For<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); For { target, @@ -2806,6 +2851,11 @@ fn make_for<'input, 'a>( body, orelse, asynchronous, + leading_lines: Default::default(), + whitespace_after_for: Default::default(), + whitespace_before_in: Default::default(), + whitespace_after_in: Default::default(), + whitespace_before_colon: Default::default(), async_tok, for_tok, in_tok, @@ -2813,56 +2863,50 @@ fn make_for<'input, 'a>( } } -fn make_while<'input, 'a>( - while_tok: TokenRef<'input, 'a>, - test: Expression<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, - orelse: Option>, -) -> While<'input, 'a> { +fn make_while<'a>( + while_tok: TokenRef<'a>, + test: Expression<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, + orelse: Option>, +) -> While<'a> { While { test, body, orelse, + leading_lines: Default::default(), + whitespace_after_while: Default::default(), + whitespace_before_colon: Default::default(), while_tok, colon_tok, } } -fn make_await<'input, 'a>( - await_tok: TokenRef<'input, 'a>, - expression: Expression<'input, 'a>, -) -> Await<'input, 'a> { +fn make_await<'a>(await_tok: TokenRef<'a>, expression: Expression<'a>) -> Await<'a> { Await { expression: Box::new(expression), lpar: Default::default(), rpar: Default::default(), + whitespace_after_await: Default::default(), await_tok, } } -fn make_class_def<'input, 'a>( - class_tok: TokenRef<'input, 'a>, - name: Name<'input, 'a>, - type_parameters: Option>, - args: Option<( - LeftParen<'input, 'a>, - Option>>, - RightParen<'input, 'a>, - )>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> std::result::Result, &'static str> { +fn make_class_def<'a>( + class_tok: TokenRef<'a>, + name: Name<'a>, + args: Option<(LeftParen<'a>, Option>>, RightParen<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> std::result::Result, &'static str> { let mut bases = vec![]; let mut keywords = vec![]; - let mut lpar_tok = None; - let mut rpar_tok = None; + let mut parens_tok = None; let mut lpar = None; let mut rpar = None; if let Some((lpar_, args, rpar_)) = args { - lpar_tok = Some(lpar_.lpar_tok); - rpar_tok = Some(rpar_.rpar_tok); + parens_tok = Some((lpar_.lpar_tok.clone(), rpar_.rpar_tok.clone())); lpar = Some(lpar_); rpar = Some(rpar_); if let Some(args) = args { @@ -2885,100 +2929,54 @@ fn make_class_def<'input, 'a>( } Ok(ClassDef { name, - type_parameters, body, bases, keywords, decorators: vec![], lpar, rpar, + leading_lines: Default::default(), + lines_after_decorators: Default::default(), + whitespace_after_class: Default::default(), + whitespace_after_name: Default::default(), + whitespace_before_colon: Default::default(), class_tok, - lpar_tok, - rpar_tok, + parens_tok, colon_tok, }) } -fn make_string<'input, 'a>(tok: 
TokenRef<'input, 'a>) -> String<'input, 'a> { +fn make_string(tok: TokenRef) -> String { String::Simple(SimpleString { value: tok.string, ..Default::default() }) } -fn make_strings<'input, 'a>( - s: Vec<(String<'input, 'a>, TokenRef<'input, 'a>)>, -) -> GrammarResult> { - if s.len() > MAX_RECURSION_DEPTH { - return Err("shorter concatenated string"); - } +fn make_strings<'a>(s: Vec<(String<'a>, TokenRef<'a>)>) -> String<'a> { let mut strings = s.into_iter().rev(); let (first, _) = strings.next().expect("no strings to make a string of"); - Ok(strings.fold(first, |acc, (str, tok)| { - let ret: String<'input, 'a> = String::Concatenated(ConcatenatedString { + strings.fold(first, |acc, (str, tok)| { + let ret: String<'a> = String::Concatenated(ConcatenatedString { left: Box::new(str), right: Box::new(acc), + whitespace_between: Default::default(), lpar: Default::default(), rpar: Default::default(), right_tok: tok, }); ret - })) + }) } -fn make_tstring_expression<'input, 'a>( - lbrace_tok: TokenRef<'input, 'a>, - expression: Expression<'input, 'a>, - eq: Option>, - conversion_pair: Option<(TokenRef<'input, 'a>, &'a str)>, - format_pair: Option<( - TokenRef<'input, 'a>, - Vec>, - )>, - rbrace_tok: TokenRef<'input, 'a>, -) -> TemplatedStringExpression<'input, 'a> { - let equal: Option> = eq.map(make_assign_equal); - let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair { - (Some(t), Some(c)) - } else { - (None, None) - }; - let (format_tok, format_spec) = if let Some((t, f)) = format_pair { - (Some(t), Some(f)) - } else { - (None, None) - }; - let after_expr_tok = if equal.is_some() { - None - } else if let Some(tok) = conversion_tok { - Some(tok) - } else if let Some(tok) = format_tok { - Some(tok) - } else { - Some(rbrace_tok) - }; - - TemplatedStringExpression { - expression, - conversion, - format_spec, - equal, - lbrace_tok, - after_expr_tok, - } -} - -fn make_fstring_expression<'input, 'a>( - lbrace_tok: TokenRef<'input, 'a>, - expression: Expression<'input, 'a>, - eq: Option>, - conversion_pair: Option<(TokenRef<'input, 'a>, &'a str)>, - format_pair: Option<( - TokenRef<'input, 'a>, - Vec>, - )>, - rbrace_tok: TokenRef<'input, 'a>, -) -> FormattedStringExpression<'input, 'a> { +fn make_fstring_expression<'a>( + lbrace_tok: TokenRef<'a>, + expression: Expression<'a>, + eq: Option>, + conversion_pair: Option<(TokenRef<'a>, &'a str)>, + format_pair: Option<(TokenRef<'a>, Vec>)>, + rbrace_tok: TokenRef<'a>, +) -> FormattedStringExpression<'a> { let equal = eq.map(make_assign_equal); let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair { (Some(t), Some(c)) @@ -3004,17 +3002,19 @@ fn make_fstring_expression<'input, 'a>( expression, conversion, format_spec, + whitespace_before_expression: Default::default(), + whitespace_after_expression: Default::default(), equal, lbrace_tok, after_expr_tok, } } -fn make_fstring<'input, 'a>( +fn make_fstring<'a>( start: &'a str, - parts: Vec>, + parts: Vec>, end: &'a str, -) -> FormattedString<'input, 'a> { +) -> FormattedString<'a> { FormattedString { start, parts, @@ -3024,126 +3024,180 @@ fn make_fstring<'input, 'a>( } } -fn make_tstring<'input, 'a>( - start: &'a str, - parts: Vec>, - end: &'a str, -) -> TemplatedString<'input, 'a> { - TemplatedString { - start, - parts, - end, - lpar: Default::default(), - rpar: Default::default(), - } -} - -fn make_finally<'input, 'a>( - finally_tok: TokenRef<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> Finally<'input, 'a> { +fn make_finally<'a>( + 
finally_tok: TokenRef<'a>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> Finally<'a> { Finally { body, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), finally_tok, colon_tok, } } -fn make_except<'input, 'a>( - except_tok: TokenRef<'input, 'a>, - exp: Option>, - as_: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> ExceptHandler<'input, 'a> { +fn make_except<'a>( + except_tok: TokenRef<'a>, + exp: Option>, + as_: Option<(TokenRef<'a>, Name<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> ExceptHandler<'a> { // TODO: AsName should come from outside - let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); ExceptHandler { body, r#type: exp, name, + leading_lines: Default::default(), + whitespace_after_except: Default::default(), + whitespace_before_colon: Default::default(), except_tok, colon_tok, } } -fn make_except_star<'input, 'a>( - except_tok: TokenRef<'input, 'a>, - star_tok: TokenRef<'input, 'a>, - exp: Expression<'input, 'a>, - as_: Option<(TokenRef<'input, 'a>, Name<'input, 'a>)>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> ExceptStarHandler<'input, 'a> { +fn make_except_star<'a>( + except_tok: TokenRef<'a>, + star_tok: TokenRef<'a>, + exp: Expression<'a>, + as_: Option<(TokenRef<'a>, Name<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> ExceptStarHandler<'a> { // TODO: AsName should come from outside - let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(Box::new(y)))); + let name = as_.map(|(x, y)| make_as_name(x, AssignTargetExpression::Name(y))); ExceptStarHandler { body, r#type: exp, name, + leading_lines: Default::default(), + whitespace_after_except: Default::default(), + whitespace_after_star: Default::default(), + whitespace_before_colon: Default::default(), except_tok, colon_tok, star_tok, } } -fn make_try<'input, 'a>( - try_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, - handlers: Vec>, - orelse: Option>, - finalbody: Option>, -) -> Try<'input, 'a> { +fn make_try<'a>( + try_tok: TokenRef<'a>, + body: Suite<'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) -> Try<'a> { Try { body, handlers, orelse, finalbody, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), try_tok, } } -fn make_try_star<'input, 'a>( - try_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, - handlers: Vec>, - orelse: Option>, - finalbody: Option>, -) -> TryStar<'input, 'a> { +fn make_try_star<'a>( + try_tok: TokenRef<'a>, + body: Suite<'a>, + handlers: Vec>, + orelse: Option>, + finalbody: Option>, +) -> TryStar<'a> { TryStar { body, handlers, orelse, finalbody, + leading_lines: Default::default(), + whitespace_before_colon: Default::default(), try_tok, } } -fn make_aug_op<'input, 'a>(tok: TokenRef<'input, 'a>) -> Result<'a, AugOp<'input, 'a>> { +fn make_aug_op(tok: TokenRef) -> Result { + let whitespace_before = Default::default(); + let whitespace_after = Default::default(); + Ok(match tok.string { - "+=" => AugOp::AddAssign { tok }, - "-=" => AugOp::SubtractAssign { tok }, - "*=" => AugOp::MultiplyAssign { tok }, - "@=" => AugOp::MatrixMultiplyAssign { tok }, - "/=" => AugOp::DivideAssign { tok }, - "%=" => AugOp::ModuloAssign { tok }, - "&=" => AugOp::BitAndAssign { tok }, - "|=" => AugOp::BitOrAssign { tok }, - "^=" => AugOp::BitXorAssign { tok }, - "<<=" 
=> AugOp::LeftShiftAssign { tok }, - ">>=" => AugOp::RightShiftAssign { tok }, - "**=" => AugOp::PowerAssign { tok }, - "//=" => AugOp::FloorDivideAssign { tok }, + "+=" => AugOp::AddAssign { + whitespace_before, + whitespace_after, + tok, + }, + "-=" => AugOp::SubtractAssign { + whitespace_before, + whitespace_after, + tok, + }, + "*=" => AugOp::MultiplyAssign { + whitespace_before, + whitespace_after, + tok, + }, + "@=" => AugOp::MatrixMultiplyAssign { + whitespace_before, + whitespace_after, + tok, + }, + "/=" => AugOp::DivideAssign { + whitespace_before, + whitespace_after, + tok, + }, + "%=" => AugOp::ModuloAssign { + whitespace_before, + whitespace_after, + tok, + }, + "&=" => AugOp::BitAndAssign { + whitespace_before, + whitespace_after, + tok, + }, + "|=" => AugOp::BitOrAssign { + whitespace_before, + whitespace_after, + tok, + }, + "^=" => AugOp::BitXorAssign { + whitespace_before, + whitespace_after, + tok, + }, + "<<=" => AugOp::LeftShiftAssign { + whitespace_before, + whitespace_after, + tok, + }, + ">>=" => AugOp::RightShiftAssign { + whitespace_before, + whitespace_after, + tok, + }, + "**=" => AugOp::PowerAssign { + whitespace_before, + whitespace_after, + tok, + }, + "//=" => AugOp::FloorDivideAssign { + whitespace_before, + whitespace_after, + tok, + }, _ => return Err(ParserError::OperatorError), }) } -fn make_aug_assign<'input, 'a>( - target: AssignTargetExpression<'input, 'a>, - operator: AugOp<'input, 'a>, - value: Expression<'input, 'a>, -) -> AugAssign<'input, 'a> { +fn make_aug_assign<'a>( + target: AssignTargetExpression<'a>, + operator: AugOp<'a>, + value: Expression<'a>, +) -> AugAssign<'a> { AugAssign { target, operator, @@ -3152,11 +3206,11 @@ fn make_aug_assign<'input, 'a>( } } -fn make_with_item<'input, 'a>( - item: Expression<'input, 'a>, - as_: Option>, - n: Option>, -) -> WithItem<'input, 'a> { +fn make_with_item<'a>( + item: Expression<'a>, + as_: Option>, + n: Option>, +) -> WithItem<'a> { let asname = match (as_, n) { (Some(as_), Some(n)) => Some(make_as_name(as_, n)), (None, None) => None, @@ -3169,77 +3223,83 @@ fn make_with_item<'input, 'a>( } } -fn make_with<'input, 'a>( - async_tok: Option>, - with_tok: TokenRef<'input, 'a>, - lpar: Option>, - items: Vec>, - rpar: Option>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> With<'input, 'a> { - let asynchronous = async_tok.as_ref().map(|_| make_async()); +fn make_with<'a>( + async_tok: Option>, + with_tok: TokenRef<'a>, + lpar: Option>, + items: Vec>, + rpar: Option>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> With<'a> { + let asynchronous = async_tok.as_ref().map(|_| Asynchronous { + whitespace_after: Default::default(), + }); With { items, body, asynchronous, + leading_lines: Default::default(), lpar, rpar, + whitespace_after_with: Default::default(), + whitespace_before_colon: Default::default(), async_tok, with_tok, colon_tok, } } -fn make_del<'input, 'a>( - tok: TokenRef<'input, 'a>, - target: DelTargetExpression<'input, 'a>, -) -> Del<'input, 'a> { +fn make_del<'a>(tok: TokenRef<'a>, target: DelTargetExpression<'a>) -> Del<'a> { Del { target, + whitespace_after_del: Default::default(), semicolon: Default::default(), tok, } } -fn make_del_tuple<'input, 'a>( - lpar: Option>, - elements: Vec>, - rpar: Option>, -) -> DelTargetExpression<'input, 'a> { - DelTargetExpression::Tuple(Box::new(Tuple { +fn make_del_tuple<'a>( + lpar: Option>, + elements: Vec>, + rpar: Option>, +) -> DelTargetExpression<'a> { + DelTargetExpression::Tuple(Tuple { elements, lpar: lpar.map(|x| 
vec![x]).unwrap_or_default(), rpar: rpar.map(|x| vec![x]).unwrap_or_default(), - })) + }) } -fn make_named_expr<'input, 'a>( - name: Name<'input, 'a>, - tok: TokenRef<'input, 'a>, - expr: Expression<'input, 'a>, -) -> NamedExpr<'input, 'a> { +fn make_named_expr<'a>(name: Name<'a>, tok: TokenRef<'a>, expr: Expression<'a>) -> NamedExpr<'a> { NamedExpr { - target: Box::new(Expression::Name(Box::new(name))), + target: Box::new(Expression::Name(name)), value: Box::new(expr), lpar: Default::default(), rpar: Default::default(), + whitespace_before_walrus: Default::default(), + whitespace_after_walrus: Default::default(), walrus_tok: tok, } } -fn make_match<'input, 'a>( - match_tok: TokenRef<'input, 'a>, - subject: Expression<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - indent_tok: TokenRef<'input, 'a>, - cases: Vec>, - dedent_tok: TokenRef<'input, 'a>, -) -> Match<'input, 'a> { +fn make_match<'a>( + match_tok: TokenRef<'a>, + subject: Expression<'a>, + colon_tok: TokenRef<'a>, + indent_tok: TokenRef<'a>, + cases: Vec>, + dedent_tok: TokenRef<'a>, +) -> Match<'a> { Match { subject, cases, + leading_lines: Default::default(), + whitespace_after_match: Default::default(), + whitespace_before_colon: Default::default(), + whitespace_after_colon: Default::default(), indent: Default::default(), + footer: Default::default(), match_tok, colon_tok, indent_tok, @@ -3247,13 +3307,13 @@ fn make_match<'input, 'a>( } } -fn make_case<'input, 'a>( - case_tok: TokenRef<'input, 'a>, - pattern: MatchPattern<'input, 'a>, - guard: Option<(TokenRef<'input, 'a>, Expression<'input, 'a>)>, - colon_tok: TokenRef<'input, 'a>, - body: Suite<'input, 'a>, -) -> MatchCase<'input, 'a> { +fn make_case<'a>( + case_tok: TokenRef<'a>, + pattern: MatchPattern<'a>, + guard: Option<(TokenRef<'a>, Expression<'a>)>, + colon_tok: TokenRef<'a>, + body: Suite<'a>, +) -> MatchCase<'a> { let (if_tok, guard) = match guard { Some((if_tok, guard)) => (Some(if_tok), Some(guard)), None => (None, None), @@ -3262,25 +3322,30 @@ fn make_case<'input, 'a>( pattern, guard, body, + leading_lines: Default::default(), + whitespace_after_case: Default::default(), + whitespace_before_if: Default::default(), + whitespace_after_if: Default::default(), + whitespace_before_colon: Default::default(), case_tok, if_tok, colon_tok, } } -fn make_match_value<'input, 'a>(value: Expression<'input, 'a>) -> MatchPattern<'input, 'a> { +fn make_match_value(value: Expression) -> MatchPattern { MatchPattern::Value(MatchValue { value }) } -fn make_match_singleton<'input, 'a>(value: Name<'input, 'a>) -> MatchPattern<'input, 'a> { +fn make_match_singleton(value: Name) -> MatchPattern { MatchPattern::Singleton(MatchSingleton { value }) } -fn make_list_pattern<'input, 'a>( - lbracket: Option>, - patterns: Vec>, - rbracket: Option>, -) -> MatchSequence<'input, 'a> { +fn make_list_pattern<'a>( + lbracket: Option>, + patterns: Vec>, + rbracket: Option>, +) -> MatchSequence<'a> { MatchSequence::MatchList(MatchList { patterns, lbracket, @@ -3290,28 +3355,34 @@ fn make_list_pattern<'input, 'a>( }) } -fn make_as_pattern<'input, 'a>( - pattern: Option>, - as_tok: Option>, - name: Option>, -) -> MatchPattern<'input, 'a> { +fn make_as_pattern<'a>( + pattern: Option>, + as_tok: Option>, + name: Option>, +) -> MatchPattern<'a> { MatchPattern::As(Box::new(MatchAs { pattern, name, lpar: Default::default(), rpar: Default::default(), + whitespace_before_as: Default::default(), + whitespace_after_as: Default::default(), as_tok, })) } -fn make_bit_or<'input, 'a>(tok: TokenRef<'input, 'a>) -> 
BitOr<'input, 'a> { - BitOr { tok } +fn make_bit_or(tok: TokenRef) -> BitOr { + BitOr { + whitespace_before: Default::default(), + whitespace_after: Default::default(), + tok, + } } -fn make_or_pattern<'input, 'a>( - first: MatchPattern<'input, 'a>, - rest: Vec<(TokenRef<'input, 'a>, MatchPattern<'input, 'a>)>, -) -> MatchPattern<'input, 'a> { +fn make_or_pattern<'a>( + first: MatchPattern<'a>, + rest: Vec<(TokenRef<'a>, MatchPattern<'a>)>, +) -> MatchPattern<'a> { if rest.is_empty() { return first; } @@ -3337,29 +3408,25 @@ fn make_or_pattern<'input, 'a>( })) } -fn ensure_real_number<'input, 'a>( - tok: TokenRef<'input, 'a>, -) -> GrammarResult> { +fn ensure_real_number(tok: TokenRef) -> GrammarResult { match make_number(tok) { e @ (Expression::Integer(_) | Expression::Float(_)) => Ok(e), _ => Err("real number"), } } -fn ensure_imaginary_number<'input, 'a>( - tok: TokenRef<'input, 'a>, -) -> GrammarResult> { +fn ensure_imaginary_number(tok: TokenRef) -> GrammarResult { match make_number(tok) { e @ Expression::Imaginary(_) => Ok(e), _ => Err("imaginary number"), } } -fn make_tuple_pattern<'input, 'a>( - lpar: LeftParen<'input, 'a>, - patterns: Vec>, - rpar: RightParen<'input, 'a>, -) -> MatchSequence<'input, 'a> { +fn make_tuple_pattern<'a>( + lpar: LeftParen<'a>, + patterns: Vec>, + rpar: RightParen<'a>, +) -> MatchSequence<'a> { MatchSequence::MatchTuple(MatchTuple { patterns, lpar: vec![lpar], @@ -3367,44 +3434,40 @@ fn make_tuple_pattern<'input, 'a>( }) } -fn make_open_sequence_pattern<'input, 'a>( - first: StarrableMatchSequenceElement<'input, 'a>, - comma: Comma<'input, 'a>, - mut rest: Vec>, -) -> Vec> { +fn make_open_sequence_pattern<'a>( + first: StarrableMatchSequenceElement<'a>, + comma: Comma<'a>, + mut rest: Vec>, +) -> Vec> { rest.insert(0, first.with_comma(comma)); rest } -fn make_match_sequence_element<'input, 'a>( - value: MatchPattern<'input, 'a>, -) -> MatchSequenceElement<'input, 'a> { +fn make_match_sequence_element(value: MatchPattern) -> MatchSequenceElement { MatchSequenceElement { value, comma: Default::default(), } } -fn make_match_star<'input, 'a>( - star_tok: TokenRef<'input, 'a>, - name: Option>, -) -> MatchStar<'input, 'a> { +fn make_match_star<'a>(star_tok: TokenRef<'a>, name: Option>) -> MatchStar<'a> { MatchStar { name, comma: Default::default(), + whitespace_before_name: Default::default(), star_tok, } } -fn make_match_mapping<'input, 'a>( - lbrace: LeftCurlyBrace<'input, 'a>, - mut elements: Vec>, - el_comma: Option>, - star_tok: Option>, - rest: Option>, - trailing_comma: Option>, - rbrace: RightCurlyBrace<'input, 'a>, -) -> MatchPattern<'input, 'a> { +fn make_match_mapping<'a>( + lbrace: LeftCurlyBrace<'a>, + mut elements: Vec>, + el_comma: Option>, + star_tok: Option>, + rest: Option>, + trailing_comma: Option>, + rbrace: RightCurlyBrace<'a>, +) -> MatchPattern<'a> { if let Some(c) = el_comma { if let Some(el) = elements.pop() { elements.push(el.with_comma(c)); @@ -3419,32 +3482,35 @@ fn make_match_mapping<'input, 'a>( rbrace, lpar: Default::default(), rpar: Default::default(), + whitespace_before_rest: Default::default(), star_tok, }) } -fn make_match_mapping_element<'input, 'a>( - key: Expression<'input, 'a>, - colon_tok: TokenRef<'input, 'a>, - pattern: MatchPattern<'input, 'a>, -) -> MatchMappingElement<'input, 'a> { +fn make_match_mapping_element<'a>( + key: Expression<'a>, + colon_tok: TokenRef<'a>, + pattern: MatchPattern<'a>, +) -> MatchMappingElement<'a> { MatchMappingElement { key, pattern, comma: Default::default(), + 
+        whitespace_after_colon: Default::default(),
         colon_tok,
     }
 }
 
-fn make_class_pattern<'input, 'a>(
-    cls: NameOrAttribute<'input, 'a>,
-    lpar_tok: TokenRef<'input, 'a>,
-    mut patterns: Vec<MatchSequenceElement<'input, 'a>>,
-    pat_comma: Option<Comma<'input, 'a>>,
-    mut kwds: Vec<MatchKeywordElement<'input, 'a>>,
-    kwd_comma: Option<Comma<'input, 'a>>,
-    rpar_tok: TokenRef<'input, 'a>,
-) -> MatchPattern<'input, 'a> {
+fn make_class_pattern<'a>(
+    cls: NameOrAttribute<'a>,
+    lpar_tok: TokenRef<'a>,
+    mut patterns: Vec<MatchSequenceElement<'a>>,
+    pat_comma: Option<Comma<'a>>,
+    mut kwds: Vec<MatchKeywordElement<'a>>,
+    kwd_comma: Option<Comma<'a>>,
+    rpar_tok: TokenRef<'a>,
+) -> MatchPattern<'a> {
     if let Some(c) = pat_comma {
         if let Some(el) = patterns.pop() {
             patterns.push(el.with_comma(c));
@@ -3463,134 +3529,25 @@ fn make_class_pattern<'input, 'a>(
         kwds,
         lpar: Default::default(),
         rpar: Default::default(),
+        whitespace_after_cls: Default::default(),
+        whitespace_before_patterns: Default::default(),
+        whitespace_after_kwds: Default::default(),
         lpar_tok,
         rpar_tok,
     })
 }
 
-fn make_match_keyword_element<'input, 'a>(
-    key: Name<'input, 'a>,
-    equal_tok: TokenRef<'input, 'a>,
-    pattern: MatchPattern<'input, 'a>,
-) -> MatchKeywordElement<'input, 'a> {
+fn make_match_keyword_element<'a>(
+    key: Name<'a>,
+    equal_tok: TokenRef<'a>,
+    pattern: MatchPattern<'a>,
+) -> MatchKeywordElement<'a> {
     MatchKeywordElement {
         key,
         pattern,
         comma: Default::default(),
+        whitespace_before_equal: Default::default(),
+        whitespace_after_equal: Default::default(),
         equal_tok,
     }
 }
-
-struct TypeParamBound<'input, 'a>(TokenRef<'input, 'a>, Expression<'input, 'a>);
-
-fn make_type_param_bound<'input, 'a>(
-    colon_tok: TokenRef<'input, 'a>,
-    e: Expression<'input, 'a>,
-) -> TypeParamBound<'input, 'a> {
-    TypeParamBound(colon_tok, e)
-}
-
-fn make_param_spec<'input, 'a>(
-    star_tok: TokenRef<'input, 'a>,
-    name: Name<'input, 'a>,
-    def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>,
-) -> TypeParam<'input, 'a> {
-    let (equal, default) = match def {
-        Some((a, b)) => (Some(a), Some(b)),
-        None => (None, None),
-    };
-    TypeParam {
-        param: TypeVarLike::ParamSpec(ParamSpec { name, star_tok }),
-        comma: Default::default(),
-        equal: equal,
-        star: "",
-        default: default,
-        star_tok: None,
-    }
-}
-
-fn make_type_var_tuple<'input, 'a>(
-    star_tok: TokenRef<'input, 'a>,
-    name: Name<'input, 'a>,
-    def: Option<(
-        AssignEqual<'input, 'a>,
-        Option<TokenRef<'input, 'a>>,
-        Expression<'input, 'a>,
-    )>,
-) -> TypeParam<'input, 'a> {
-    let (equal, default_star, default) = match def {
-        Some((a, b, c)) => (Some(a), b, Some(c)),
-        None => (None, None, None),
-    };
-    let star = match default_star {
-        Some(a) => a.string,
-        None => "",
-    };
-
-    TypeParam {
-        param: TypeVarLike::TypeVarTuple(TypeVarTuple { name, star_tok }),
-        comma: Default::default(),
-        equal: equal,
-        star: star,
-        default: default,
-        star_tok: default_star,
-    }
-}
-
-fn make_type_var<'input, 'a>(
-    name: Name<'input, 'a>,
-    bound: Option<TypeParamBound<'input, 'a>>,
-    def: Option<(AssignEqual<'input, 'a>, Expression<'input, 'a>)>,
-) -> TypeParam<'input, 'a> {
-    let (bound, colon) = match bound {
-        Some(TypeParamBound(c, e)) => (Some(Box::new(e)), Some(make_colon(c))),
-        _ => (None, None),
-    };
-    let (equal, default) = match def {
-        Some((a, b)) => (Some(a), Some(b)),
-        None => (None, None),
-    };
-    TypeParam {
-        param: TypeVarLike::TypeVar(TypeVar { name, bound, colon }),
-        comma: Default::default(),
-        equal: equal,
-        star: "",
-        default: default,
-        star_tok: None,
-    }
-}
-
-fn make_type_parameters<'input, 'a>(
-    lbracket: LeftSquareBracket<'input, 'a>,
-    params: Vec<TypeParam<'input, 'a>>,
-    rbracket: RightSquareBracket<'input, 'a>,
-) -> TypeParameters<'input, 'a> {
-    TypeParameters {
-        lbracket,
-        params,
-        rbracket,
-    }
-}
-
-fn make_type_alias<'input, 'a>(
-    type_tok: TokenRef<'input, 'a>,
-    name: Name<'input, 'a>,
-    type_parameters: Option<TypeParameters<'input, 'a>>,
-    equals_tok: TokenRef<'input, 'a>,
-    value: Expression<'input, 'a>,
-) -> TypeAlias<'input, 'a> {
-    let lbracket_tok = if let Some(tp) = &type_parameters {
-        Some(tp.lbracket.tok)
-    } else {
-        None
-    };
-    TypeAlias {
-        type_tok,
-        name,
-        type_parameters,
-        equals_tok,
-        value: Box::new(value),
-        semicolon: Default::default(),
-        lbracket_tok,
-    }
-}
diff --git a/native/libcst/src/parser/mod.rs b/native/libcst/src/parser/mod.rs
index 4e9b4654..8e6ec8e1 100644
--- a/native/libcst/src/parser/mod.rs
+++ b/native/libcst/src/parser/mod.rs
@@ -1,4 +1,4 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree
@@ -8,5 +8,4 @@ mod grammar;
 mod numbers;
 
 pub use errors::ParserError;
-pub(crate) use grammar::TokVec;
 pub use grammar::{python, Result};
diff --git a/native/libcst/src/parser/numbers.rs b/native/libcst/src/parser/numbers.rs
index 95db532b..19d12214 100644
--- a/native/libcst/src/parser/numbers.rs
+++ b/native/libcst/src/parser/numbers.rs
@@ -1,69 +1,73 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree
 
+use once_cell::sync::Lazy;
 use regex::Regex;
 
-use crate::nodes::deflated::{Expression, Float, Imaginary, Integer};
+use crate::{Expression, Float, Imaginary, Integer};
 
 static HEX: &str = r"0[xX](?:_?[0-9a-fA-F])+";
 static BIN: &str = r"0[bB](?:_?[01])+";
 static OCT: &str = r"0[oO](?:_?[0-7])+";
 static DECIMAL: &str = r"(?:0(?:_?0)*|[1-9](?:_?[0-9])*)";
 
+static INTEGER_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(format!("^({}|{}|{}|{})$", HEX, BIN, OCT, DECIMAL).as_str()).expect("regex")
+});
+
 static EXPONENT: &str = r"[eE][-+]?[0-9](?:_?[0-9])*";
 // Note: these don't exactly match the python implementation (exponent is not included)
 static POINT_FLOAT: &str = r"([0-9](?:_?[0-9])*\.(?:[0-9](?:_?[0-9])*)?|\.[0-9](?:_?[0-9])*)";
 static EXP_FLOAT: &str = r"[0-9](?:_?[0-9])*";
 
-thread_local! {
-    static INTEGER_RE: Regex =
-        Regex::new(format!("^({}|{}|{}|{})$", HEX, BIN, OCT, DECIMAL).as_str()).expect("regex");
-    static FLOAT_RE: Regex =
-        Regex::new(
-            format!(
-                "^({}({})?|{}{})$",
-                POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT
-            )
-            .as_str(),
+static FLOAT_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(
+        format!(
+            "^({}({})?|{}{})$",
+            POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT
+        )
-        .expect("regex");
-    static IMAGINARY_RE: Regex =
-        Regex::new(
-            format!(
-                r"^([0-9](?:_?[0-9])*[jJ]|({}({})?|{}{})[jJ])$",
-                POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT
-            )
-            .as_str(),
+        .as_str(),
+    )
+    .expect("regex")
+});
+
+static IMAGINARY_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(
+        format!(
+            r"^([0-9](?:_?[0-9])*[jJ]|({}({})?|{}{})[jJ])$",
+            POINT_FLOAT, EXPONENT, EXP_FLOAT, EXPONENT
+        )
-        .expect("regex");
-}
+        .as_str(),
+    )
+    .expect("regex")
+});
 
 pub(crate) fn parse_number(raw: &str) -> Expression {
-    if INTEGER_RE.with(|r| r.is_match(raw)) {
-        Expression::Integer(Box::new(Integer {
+    if INTEGER_RE.is_match(raw) {
+        Expression::Integer(Integer {
             value: raw,
             lpar: Default::default(),
             rpar: Default::default(),
-        }))
-    } else if FLOAT_RE.with(|r| r.is_match(raw)) {
-        Expression::Float(Box::new(Float {
+        })
+    } else if FLOAT_RE.is_match(raw) {
+        Expression::Float(Float {
             value: raw,
             lpar: Default::default(),
             rpar: Default::default(),
-        }))
-    } else if IMAGINARY_RE.with(|r| r.is_match(raw)) {
-        Expression::Imaginary(Box::new(Imaginary {
+        })
+    } else if IMAGINARY_RE.is_match(raw) {
+        Expression::Imaginary(Imaginary {
            value: raw,
            lpar: Default::default(),
            rpar: Default::default(),
-        }))
+        })
     } else {
-        Expression::Integer(Box::new(Integer {
+        Expression::Integer(Integer {
             value: raw,
             lpar: Default::default(),
             rpar: Default::default(),
-        }))
+        })
     }
 }
diff --git a/native/libcst/src/py.rs b/native/libcst/src/py.rs
index 68c03744..b938b691 100644
--- a/native/libcst/src/py.rs
+++ b/native/libcst/src/py.rs
@@ -1,31 +1,29 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree -use crate::nodes::traits::py::TryIntoPy; use pyo3::prelude::*; -#[pymodule(gil_used = false)] +#[pymodule] #[pyo3(name = "native")] -pub fn libcst_native(_py: Python, m: &Bound) -> PyResult<()> { +pub fn libcst_native(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m)] - #[pyo3(signature = (source, encoding=None))] - fn parse_module(source: String, encoding: Option<&str>) -> PyResult> { + fn parse_module(source: String, encoding: Option<&str>) -> PyResult { let m = crate::parse_module(source.as_str(), encoding)?; - Python::attach(|py| m.try_into_py(py)) + Python::with_gil(|py| Ok(m.into_py(py))) } #[pyfn(m)] - fn parse_expression(source: String) -> PyResult> { + fn parse_expression(source: String) -> PyResult { let expr = crate::parse_expression(source.as_str())?; - Python::attach(|py| expr.try_into_py(py)) + Python::with_gil(|py| Ok(expr.into_py(py))) } #[pyfn(m)] - fn parse_statement(source: String) -> PyResult> { + fn parse_statement(source: String) -> PyResult { let stm = crate::parse_statement(source.as_str())?; - Python::attach(|py| stm.try_into_py(py)) + Python::with_gil(|py| Ok(stm.into_py(py))) } Ok(()) diff --git a/native/libcst/src/tokenizer/core/mod.rs b/native/libcst/src/tokenizer/core/mod.rs index 120b6163..3ff97e7d 100644 --- a/native/libcst/src/tokenizer/core/mod.rs +++ b/native/libcst/src/tokenizer/core/mod.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Meta Platforms, Inc. and affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. // // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved @@ -58,6 +58,7 @@ /// [RustPython's parser]: https://crates.io/crates/rustpython-parser mod string_types; +use once_cell::sync::Lazy; use regex::Regex; use std::cell::RefCell; use std::cmp::Ordering; @@ -66,9 +67,8 @@ use std::fmt::Debug; use std::fmt::Formatter; use std::rc::Rc; -use crate::tokenizer::core::string_types::FTStringType; use crate::tokenizer::{ - core::string_types::{FTStringNode, StringQuoteChar, StringQuoteSize}, + core::string_types::{FStringNode, StringQuoteChar, StringQuoteSize}, operators::OPERATOR_RE, text_position::{TextPosition, TextPositionSnapshot}, whitespace_parser::State as WhitespaceState, @@ -83,27 +83,25 @@ const MAX_INDENT: usize = 100; // https://github.com/rust-lang/rust/issues/71763 const MAX_CHAR: char = '\u{10ffff}'; -thread_local! 
{
-    static SPACE_TAB_FORMFEED_RE: Regex = Regex::new(r"\A[ \f\t]+").expect("regex");
-    static ANY_NON_NEWLINE_RE: Regex = Regex::new(r"\A[^\r\n]+").expect("regex");
-    static STRING_PREFIX_RE: Regex =
-        Regex::new(r"\A(?i)(u|[bf]r|r[bft]|r|b|f|t)").expect("regex");
-    static POTENTIAL_IDENTIFIER_TAIL_RE: Regex =
-        Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex");
-    static DECIMAL_DOT_DIGIT_RE: Regex = Regex::new(r"\A\.[0-9]").expect("regex");
-    static DECIMAL_TAIL_RE: Regex =
-        Regex::new(r"\A[0-9](_?[0-9])*").expect("regex");
-    static HEXADECIMAL_TAIL_RE: Regex =
-        Regex::new(r"\A(_?[0-9a-fA-F])+").expect("regex");
-    static OCTAL_TAIL_RE: Regex = Regex::new(r"\A(_?[0-7])+").expect("regex");
-    static BINARY_TAIL_RE: Regex = Regex::new(r"\A(_?[01])+").expect("regex");
+static SPACE_TAB_FORMFEED_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A[ \f\t]+").expect("regex"));
+static ANY_NON_NEWLINE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A[^\r\n]+").expect("regex"));
+static STRING_PREFIX_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex"));
+static POTENTIAL_IDENTIFIER_TAIL_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex"));
+static DECIMAL_DOT_DIGIT_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A\.[0-9]").expect("regex"));
+static DECIMAL_TAIL_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A[0-9](_?[0-9])*").expect("regex"));
+static HEXADECIMAL_TAIL_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A(_?[0-9a-fA-F])+").expect("regex"));
+static OCTAL_TAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A(_?[0-7])+").expect("regex"));
+static BINARY_TAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A(_?[01])+").expect("regex"));
 
-    /// Used to verify identifiers when there's a non-ascii character in them.
-    // This changes across unicode revisions. We'd need to ship our own unicode tables to 100% match a
-    // given Python version's behavior.
-    static UNICODE_IDENTIFIER_RE: Regex =
-        Regex::new(r"\A[\p{XID_Start}_]\p{XID_Continue}*\z").expect("regex");
-}
+/// Used to verify identifiers when there's a non-ascii character in them.
+// This changes across unicode revisions. We'd need to ship our own unicode tables to 100% match a
+// given Python version's behavior.
+static UNICODE_IDENTIFIER_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A[\p{XID_Start}_]\p{XID_Continue}*\z").expect("regex"));
 
 #[derive(Debug, Eq, PartialEq, Copy, Clone)]
 pub enum TokType {
@@ -116,12 +114,13 @@ pub enum TokType {
     Dedent,
     Async,
     Await,
+    // TODO; add support for these
+    #[allow(dead_code)]
     FStringStart,
+    #[allow(dead_code)]
     FStringString,
+    #[allow(dead_code)]
     FStringEnd,
-    TStringStart,
-    TStringString,
-    TStringEnd,
     EndMarker,
 }
 
@@ -226,8 +225,8 @@ pub struct TokState<'t> {
     ///
     /// Supporting this at the tokenizer-level is pretty nasty and adds a lot of complexity.
     /// Eventually, we should probably support this at the parser-level instead.
-    split_ftstring: bool,
-    ftstring_stack: Vec<FTStringNode>,
+    split_fstring: bool,
+    fstring_stack: Vec<FStringNode>,
 
     missing_nl_before_eof: bool,
 }
 
@@ -237,7 +236,7 @@ pub struct TokConfig {
     /// identifiers, depending on if they're being used in the context of an async function. This
     /// breaks async comprehensions outside of async functions.
pub async_hacks: bool, - pub split_ftstring: bool, + pub split_fstring: bool, // Not currently supported: // type_comments: bool, } @@ -276,8 +275,8 @@ impl<'t> TokState<'t> { async_def: false, async_def_indent: 0, async_def_nl: false, - split_ftstring: config.split_ftstring, - ftstring_stack: Vec::new(), + split_fstring: config.split_fstring, + fstring_stack: Vec::new(), missing_nl_before_eof: text.is_empty() || text.as_bytes()[text.len() - 1] != b'\n', } } @@ -289,18 +288,18 @@ impl<'t> TokState<'t> { /// Implementation of `next()`, wrapped by next() to allow for easier error handling. Roughly /// equivalent to `tok_get` in the C source code. fn next_inner(&mut self) -> Result> { - if self.split_ftstring { - if let Some(tos) = self.ftstring_stack.last() { + if self.split_fstring { + if let Some(tos) = self.fstring_stack.last() { if !tos.is_in_expr() { self.start_pos = (&self.text_pos).into(); let is_in_format_spec = tos.is_in_format_spec(); let is_raw_string = tos.is_raw_string; if let Some(tok) = - self.maybe_consume_ftstring_string(is_in_format_spec, is_raw_string)? + self.maybe_consume_fstring_string(is_in_format_spec, is_raw_string)? { return Ok(tok); } - if let Some(tok) = self.maybe_consume_ftstring_end() { + if let Some(tok) = self.maybe_consume_fstring_end() { return Ok(tok); } } @@ -321,11 +320,11 @@ impl<'t> TokState<'t> { 'again: loop { // Skip spaces - SPACE_TAB_FORMFEED_RE.with(|v| self.text_pos.consume(v)); + self.text_pos.consume(&*SPACE_TAB_FORMFEED_RE); // Skip comment, unless it's a type comment if self.text_pos.peek() == Some('#') { - ANY_NON_NEWLINE_RE.with(|v| self.text_pos.consume(v)); + self.text_pos.consume(&*ANY_NON_NEWLINE_RE); // type_comment is not supported } @@ -335,7 +334,10 @@ impl<'t> TokState<'t> { return match self.text_pos.peek() { // Check for EOF now None => { - if self.missing_nl_before_eof && !self.blank_line { + if self.missing_nl_before_eof + && self.text_pos.byte_column_number() != self.bol_width + && !self.blank_line + { self.at_bol = true; self.missing_nl_before_eof = false; Ok(TokType::Newline) @@ -366,12 +368,8 @@ impl<'t> TokState<'t> { Some('\n') => { self.text_pos.next(); self.at_bol = true; - if self.split_ftstring - && self - .ftstring_stack - .last() - .map(|node| node.allow_multiline()) - == Some(false) + if self.split_fstring + && !self.fstring_stack.iter().all(|node| node.allow_multiline()) { Err(TokError::UnterminatedString) } else if self.blank_line || !self.paren_stack.is_empty() { @@ -393,7 +391,7 @@ impl<'t> TokState<'t> { } // Number starting with period - Some('.') if DECIMAL_DOT_DIGIT_RE.with(|r| self.text_pos.matches(r)) => { + Some('.') if self.text_pos.matches(&*DECIMAL_DOT_DIGIT_RE) => { self.consume_number(NumberState::Fraction) } @@ -427,7 +425,7 @@ impl<'t> TokState<'t> { Some(ch @ '(') | Some(ch @ '[') | Some(ch @ '{') => { self.text_pos.next(); - if let Some(tos) = self.ftstring_stack.last_mut() { + if let Some(tos) = self.fstring_stack.last_mut() { tos.open_parentheses(); } self.paren_stack.push((ch, self.text_pos.line_number())); @@ -436,7 +434,7 @@ impl<'t> TokState<'t> { Some(closing @ ')') | Some(closing @ ']') | Some(closing @ '}') => { self.text_pos.next(); - if let Some(tos) = self.ftstring_stack.last_mut() { + if let Some(tos) = self.fstring_stack.last_mut() { tos.close_parentheses(); } if let Some((opening, line_number)) = self.paren_stack.pop() { @@ -461,7 +459,7 @@ impl<'t> TokState<'t> { Some(':') if self - .ftstring_stack + .fstring_stack .last() .map(|tos| tos.parentheses_count - tos.format_spec_count 
== 1) .unwrap_or(false) => @@ -472,16 +470,16 @@ impl<'t> TokState<'t> { // // >>> f'{x:=10}' # Valid, passes '=10' to formatter let tos = self - .ftstring_stack + .fstring_stack .last_mut() - .expect("ftstring_stack is not empty"); + .expect("fstring_stack is not empty"); tos.format_spec_count += 1; self.text_pos.next(); Ok(TokType::Op) } // Operator - Some(_) if OPERATOR_RE.with(|r| self.text_pos.consume(r)) => Ok(TokType::Op), + Some(_) if self.text_pos.consume(&*OPERATOR_RE) => Ok(TokType::Op), // Bad character // If nothing works, fall back to this error. CPython returns an OP in this case, @@ -535,13 +533,9 @@ impl<'t> TokState<'t> { } } - // Lines with only whitespace and/or comments and/or a line continuation - // character shouldn't affect the indentation and are not passed to the parser - // as NEWLINE tokens. - self.blank_line = matches!( - self.text_pos.peek(), - Some('#') | Some('\n') | Some('\\') | None - ); + // Lines with only whitespace and/or comments and/or a line continuation character shouldn't + // affect the indentation and are not passed to the parser as NEWLINE tokens. + self.blank_line = matches!(self.text_pos.peek(), Some('#') | Some('\n') | Some('\\')); if self.blank_line || !self.paren_stack.is_empty() { return Ok(()); @@ -631,27 +625,20 @@ impl<'t> TokState<'t> { } fn consume_identifier_or_prefixed_string(&mut self) -> Result> { - // Process the various legal combinations of b"", r"", u"",f"", and t"". - if STRING_PREFIX_RE.with(|r| self.text_pos.consume(r)) { + // Process the various legal combinations of b"", r"", u"", and f"". + if self.text_pos.consume(&*STRING_PREFIX_RE) { if let Some('"') | Some('\'') = self.text_pos.peek() { // We found a string, not an identifier. Bail! - if self.split_ftstring { - let res = match self + if self.split_fstring + && self .text_pos .slice_from_start_pos(&self.start_pos) - .chars() - .find(|c| matches!(c, 'f' | 'F' | 't' | 'T')) - { - Some('f' | 'F') => Some(FTStringType::FString), - Some('t' | 'T') => Some(FTStringType::TString), - _ => None, - }; - if let Some(str_type) = res { - // Consume the prefix and return the start token - return self.consume_prefixed_string_start(str_type); - } + .contains(&['f', 'F'][..]) + { + return self.consume_fstring_start(); + } else { + return self.consume_string(); } - return self.consume_string(); } } else { // the next character must be a potential identifier start, aka `[a-zA-Z_]|[^\x00-\x7f]` @@ -661,7 +648,7 @@ impl<'t> TokState<'t> { Some('a'..='z') | Some('A'..='Z') | Some('_') | Some('\u{80}'..=MAX_CHAR) )); } - POTENTIAL_IDENTIFIER_TAIL_RE.with(|r| self.text_pos.consume(r)); + self.text_pos.consume(&*POTENTIAL_IDENTIFIER_TAIL_RE); let identifier_str = self.text_pos.slice_from_start_pos(&self.start_pos); if !verify_identifier(identifier_str) { // TODO: async/await @@ -707,7 +694,7 @@ impl<'t> TokState<'t> { match self.text_pos.peek() { Some('x') | Some('X') => { self.text_pos.next(); - if !HEXADECIMAL_TAIL_RE.with(|r| self.text_pos.consume(r)) + if !self.text_pos.consume(&*HEXADECIMAL_TAIL_RE) || self.text_pos.peek() == Some('_') { Err(TokError::BadHexadecimal) @@ -717,7 +704,7 @@ impl<'t> TokState<'t> { } Some('o') | Some('O') => { self.text_pos.next(); - if !OCTAL_TAIL_RE.with(|r| self.text_pos.consume(r)) + if !self.text_pos.consume(&*OCTAL_TAIL_RE) || self.text_pos.peek() == Some('_') { return Err(TokError::BadOctal); @@ -731,7 +718,7 @@ impl<'t> TokState<'t> { } Some('b') | Some('B') => { self.text_pos.next(); - if !BINARY_TAIL_RE.with(|r| self.text_pos.consume(r)) + if 
!self.text_pos.consume(&*BINARY_TAIL_RE) || self.text_pos.peek() == Some('_') { return Err(TokError::BadBinary); @@ -835,7 +822,7 @@ impl<'t> TokState<'t> { /// Processes a decimal tail. This is the bit after the dot or after an E in a float. fn consume_decimal_tail(&mut self) -> Result<(), TokError<'t>> { - let result = DECIMAL_TAIL_RE.with(|r| self.text_pos.consume(r)); + let result = self.text_pos.consume(&*DECIMAL_TAIL_RE); // Assumption: If we've been called, the first character is an integer, so we must have a // regex match debug_assert!(result, "try_decimal_tail was called on a non-digit char"); @@ -894,43 +881,23 @@ impl<'t> TokState<'t> { Ok(TokType::String) } - fn consume_prefixed_string_start( - &mut self, - str_type: FTStringType, - ) -> Result> { - // Consumes everything after the (f|t) but before the actual string. + fn consume_fstring_start(&mut self) -> Result> { let (quote_char, quote_size) = self.consume_open_quote(); let is_raw_string = self .text_pos .slice_from_start_pos(&self.start_pos) .contains(&['r', 'R'][..]); - self.ftstring_stack.push(FTStringNode::new( - quote_char, - quote_size, - is_raw_string, - str_type.clone(), - )); - - match str_type { - FTStringType::FString => Ok(TokType::FStringStart), - FTStringType::TString => Ok(TokType::TStringStart), - } + self.fstring_stack + .push(FStringNode::new(quote_char, quote_size, is_raw_string)); + Ok(TokType::FStringStart) } - fn maybe_consume_ftstring_string( + fn maybe_consume_fstring_string( &mut self, is_in_format_spec: bool, is_raw_string: bool, ) -> Result, TokError<'t>> { - let allow_multiline = self - .ftstring_stack - .last() - .map(|node| node.allow_multiline()) - == Some(true); - let str_type = self - .ftstring_stack - .last() - .map(|node| node.string_type.clone()); + let allow_multiline = self.fstring_stack.iter().all(|node| node.allow_multiline()); let mut in_named_unicode: bool = false; let mut ok_result = Ok(None); // value to return if we reach the end and don't error out 'outer: loop { @@ -942,8 +909,8 @@ impl<'t> TokState<'t> { return Err(TokError::UnterminatedString); } (ch @ Some('\''), _) | (ch @ Some('"'), _) => { - // see if this actually terminates the most recent fstring - if let Some(node) = self.ftstring_stack.last() { + // see if this actually terminates something in fstring_stack + for node in self.fstring_stack.iter() { if ch == Some(node.quote_char.into()) { match node.quote_size { StringQuoteSize::Single => { @@ -969,25 +936,7 @@ impl<'t> TokState<'t> { // skip escaped char (e.g. 
\', \", or newline/line continuation) self.text_pos.next(); } - } else if let Some( - '\n' - | '\\' - | '\'' - | '"' - | 'a' - | 'b' - | 'f' - | 'n' - | 'r' - | 't' - | 'v' - | 'x' - | '0'..='9' - | 'N' - | 'u' - | 'U', - ) = self.text_pos.peek() - { + } else { // skip escaped char let next_ch = self.text_pos.next(); // check if this is a \N sequence @@ -1000,13 +949,6 @@ impl<'t> TokState<'t> { } } } - (Some('\\'), _) if is_raw_string => { - self.text_pos.next(); - // skip escaped end-of-string marker or backslash - if let Some('"' | '\'' | '\\') = self.text_pos.peek() { - self.text_pos.next(); - } - } (Some('{'), _) => { if is_in_format_spec { // don't actually consume the {, and generate an OP for it instead @@ -1032,33 +974,34 @@ impl<'t> TokState<'t> { self.text_pos.next(); } } - ok_result = match str_type { - Some(FTStringType::FString) => Ok(Some(TokType::FStringString)), - Some(FTStringType::TString) => Ok(Some(TokType::TStringString)), - None => unreachable!("We should always have a string type"), - }; + ok_result = Ok(Some(TokType::FStringString)); } ok_result } - fn maybe_consume_ftstring_end(&mut self) -> Option { + fn maybe_consume_fstring_end(&mut self) -> Option { let ch = self.text_pos.peek(); - if let Some(node) = self.ftstring_stack.last() { + let mut match_idx = None; + for (idx, node) in self.fstring_stack.iter().enumerate() { if ch == Some(node.quote_char.into()) { if node.quote_size == StringQuoteSize::Triple { - self.text_pos.consume(node.quote_char.triple_str()); + if self.text_pos.consume(node.quote_char.triple_str()) { + match_idx = Some(idx); + break; + } } else { self.text_pos.next(); // already matched + match_idx = Some(idx); + break; } - let tok_type = match node.string_type { - FTStringType::FString => TokType::FStringEnd, - FTStringType::TString => TokType::TStringEnd, - }; - self.ftstring_stack.pop(); - return Some(tok_type); } } - None + if let Some(match_idx) = match_idx { + self.fstring_stack.truncate(match_idx); + Some(TokType::FStringEnd) + } else { + None + } } } @@ -1093,7 +1036,7 @@ fn verify_identifier(name: &str) -> bool { // TODO: If `name` is non-ascii, must first normalize name to NFKC. // Common case: If the entire string is ascii, we can avoid the more expensive regex check, // since the tokenizer already validates ascii characters before calling us. - name.is_ascii() || UNICODE_IDENTIFIER_RE.with(|r| r.is_match(name)) + name.is_ascii() || UNICODE_IDENTIFIER_RE.is_match(name) } #[derive(Clone)] diff --git a/native/libcst/src/tokenizer/core/string_types.rs b/native/libcst/src/tokenizer/core/string_types.rs index 09a51851..27f0be36 100644 --- a/native/libcst/src/tokenizer/core/string_types.rs +++ b/native/libcst/src/tokenizer/core/string_types.rs @@ -1,4 +1,4 @@ -// This implementation is Copyright (c) Meta Platforms, Inc. and affiliates. +// This implementation is Copyright (c) Meta Platforms, Inc. and its affiliates. 
// // CPython 3.10.0a5 and the original C code this is based on is // Copyright (c) 2001-2021 Python Software Foundation; All Rights Reserved @@ -67,13 +67,7 @@ impl TryFrom> for StringQuoteChar { } #[derive(Clone)] -pub enum FTStringType { - FString, - TString, -} - -#[derive(Clone)] -pub struct FTStringNode { +pub struct FStringNode { pub quote_char: StringQuoteChar, pub quote_size: StringQuoteSize, pub parentheses_count: usize, @@ -81,16 +75,13 @@ pub struct FTStringNode { // In the syntax there can be multiple format_spec's nested: {x:{y:3}} pub format_spec_count: usize, pub is_raw_string: bool, - // ftstring type; either f-string or a t-string - pub string_type: FTStringType, } -impl FTStringNode { +impl FStringNode { pub fn new( quote_char: StringQuoteChar, quote_size: StringQuoteSize, is_raw_string: bool, - string_type: FTStringType, ) -> Self { Self { quote_char, @@ -99,7 +90,6 @@ impl FTStringNode { string_start: None, format_spec_count: 0, is_raw_string, - string_type, } } @@ -108,14 +98,15 @@ impl FTStringNode { } pub fn close_parentheses(&mut self) { - if self.is_in_format_spec() { - self.format_spec_count -= 1; - } self.parentheses_count -= 1; + if self.parentheses_count == 0 { + // No parentheses means that the format spec is also finished. + self.format_spec_count = 0; + } } pub fn allow_multiline(&self) -> bool { - self.quote_size == StringQuoteSize::Triple || self.is_in_expr() + self.quote_size == StringQuoteSize::Triple } pub fn is_in_expr(&self) -> bool { diff --git a/native/libcst/src/tokenizer/debug_utils.rs b/native/libcst/src/tokenizer/debug_utils.rs index 1e476a47..543258a1 100644 --- a/native/libcst/src/tokenizer/debug_utils.rs +++ b/native/libcst/src/tokenizer/debug_utils.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/mod.rs b/native/libcst/src/tokenizer/mod.rs index 9f7bbe2c..60f1a20d 100644 --- a/native/libcst/src/tokenizer/mod.rs +++ b/native/libcst/src/tokenizer/mod.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/operators.rs b/native/libcst/src/tokenizer/operators.rs index 51352900..94f70805 100644 --- a/native/libcst/src/tokenizer/operators.rs +++ b/native/libcst/src/tokenizer/operators.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. @@ -8,6 +8,7 @@ // code or that we retain the original work's copyright information. // https://docs.python.org/3/license.html#zero-clause-bsd-license-for-code-in-the-python-release-documentation +use once_cell::sync::Lazy; use regex::Regex; /// A list of strings that make up all the possible operators in a specific version of Python. 
@@ -60,7 +61,7 @@ pub const OPERATORS: &[&str] = &[ "->", // RARROW "...", // ELLIPSIS ":=", // COLONEQUAL - // Not a real operator, but needed to support the split_ftstring feature + // Not a real operator, but needed to support the split_fstring feature "!", // The fake operator added by PEP 401. Technically only valid if used with: // @@ -68,8 +69,7 @@ pub const OPERATORS: &[&str] = &[ "<>", ]; -thread_local! { -pub static OPERATOR_RE: Regex = { +pub static OPERATOR_RE: Lazy = Lazy::new(|| { // sort operators so that we try to match the longest ones first let mut sorted_operators: Box<[&str]> = OPERATORS.into(); sorted_operators.sort_unstable_by_key(|op| usize::MAX - op.len()); @@ -82,5 +82,4 @@ pub static OPERATOR_RE: Regex = { .join("|") )) .expect("regex") -}; -} +}); diff --git a/native/libcst/src/tokenizer/tests.rs b/native/libcst/src/tokenizer/tests.rs index 20188f47..db437857 100644 --- a/native/libcst/src/tokenizer/tests.rs +++ b/native/libcst/src/tokenizer/tests.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. @@ -11,7 +11,7 @@ use crate::tokenizer::core::{TokConfig, TokError, TokState, TokType}; fn default_config() -> TokConfig { TokConfig { async_hacks: false, - split_ftstring: false, + split_fstring: false, } } @@ -519,54 +519,12 @@ fn test_string_prefix() { (TokType::String, "''"), ]), ); - - // raw string escapes - assert_eq!( - tokenize_all("r'\\''", &default_config()), - Ok(vec![(TokType::String, "r'\\''")]), - ); - assert_eq!( - tokenize_all(r#"r"\"""#, &default_config()), - Ok(vec![(TokType::String, r#"r"\"""#)]), - ); - assert_eq!( - tokenize_all(r#"r'\\'"#, &default_config()), - Ok(vec![(TokType::String, r#"r'\\'"#)]), - ); - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("rf'\\''", &config), - Ok(vec![ - (TokType::FStringStart, "rf'"), - (TokType::FStringString, "\\'"), - (TokType::FStringEnd, "'"), - ]), - ); - assert_eq!( - tokenize_all(r#"rf"\"""#, &config), - Ok(vec![ - (TokType::FStringStart, "rf\""), - (TokType::FStringString, r#"\""#), - (TokType::FStringEnd, "\""), - ]), - ); - assert_eq!( - tokenize_all(r#"rf'\\'"#, &config), - Ok(vec![ - (TokType::FStringStart, "rf'"), - (TokType::FStringString, r#"\\"#), - (TokType::FStringEnd, "'"), - ]), - ); } #[test] -fn test_split_ftstring() { +fn test_split_fstring() { let config = TokConfig { - split_ftstring: true, + split_fstring: true, ..default_config() }; @@ -659,34 +617,6 @@ fn test_split_ftstring() { ); } -#[test] -fn test_fstring_escapes() { - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("f'\\{{\\}}'", &config), - Ok(vec![ - (TokType::FStringStart, "f'"), - (TokType::FStringString, "\\{{\\}}"), - (TokType::FStringEnd, "'"), - ]) - ); - assert_eq!( - tokenize_all(r#"f"regexp_like(path, '.*\{file_type}$')""#, &config), - Ok(vec![ - (TokType::FStringStart, "f\""), - (TokType::FStringString, "regexp_like(path, '.*\\"), - (TokType::Op, "{"), - (TokType::Name, "file_type"), - (TokType::Op, "}"), - (TokType::FStringString, "$')"), - (TokType::FStringEnd, "\""), - ]) - ); -} - #[test] fn test_operator() { assert_eq!( @@ -718,19 +648,6 @@ fn test_fake_newline() { ); } -#[test] -fn test_fake_newline_when_at_bol() { - assert_eq!( - tokenize_with_end_marker("(\n \\\n)", &default_config()), - 
Ok(vec![ - (TokType::Op, "("), - (TokType::Op, ")"), - (TokType::Newline, ""), - (TokType::EndMarker, "") - ]) - ) -} - #[test] fn test_no_fake_newline_for_empty_input() { assert_eq!( @@ -810,110 +727,3 @@ fn test_add_dedents_for_dangling_indent_with_comment() { ]) ); } - -#[test] -fn test_inconsistent_indentation_at_eof() { - assert_eq!( - tokenize_all("if 1:\n pass\n ", &default_config()), - Ok(vec![ - (TokType::Name, "if"), - (TokType::Number, "1"), - (TokType::Op, ":"), - (TokType::Newline, "\n"), - (TokType::Indent, ""), - (TokType::Name, "pass"), - (TokType::Newline, "\n"), - (TokType::Dedent, ""), - ]) - ) -} - -#[test] -fn test_nested_f_string_specs() { - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("f'{_:{_:}{_}}'", &config), - Ok(vec![ - (TokType::FStringStart, "f'"), - (TokType::Op, "{"), - (TokType::Name, "_"), - (TokType::Op, ":"), - (TokType::Op, "{"), - (TokType::Name, "_"), - (TokType::Op, ":"), - (TokType::Op, "}"), - (TokType::Op, "{"), - (TokType::Name, "_"), - (TokType::Op, "}"), - (TokType::Op, "}"), - (TokType::FStringEnd, "'") - ]) - ) -} - -#[test] -fn test_nested_f_strings() { - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("f'{f'{2}'}'", &config), - Ok(vec![ - (TokType::FStringStart, "f'"), - (TokType::Op, "{"), - (TokType::FStringStart, "f'"), - (TokType::Op, "{"), - (TokType::Number, "2"), - (TokType::Op, "}"), - (TokType::FStringEnd, "'"), - (TokType::Op, "}"), - (TokType::FStringEnd, "'") - ]) - ) -} -#[test] -fn test_can_tokenize_t_string_basic() { - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("t'Nothing to see here, move along'", &config), - Ok(vec![ - (TokType::TStringStart, "t'"), - (TokType::TStringString, "Nothing to see here, move along"), - (TokType::TStringEnd, "'") - ]) - ) -} -#[test] -fn test_can_tokenize_f_and_t_strings() { - let config = TokConfig { - split_ftstring: true, - ..default_config() - }; - assert_eq!( - tokenize_all("t\"TMiddle{f'FMiddle{t'{2}'}'}\"", &config), - Ok(vec![ - (TokType::TStringStart, "t\""), - (TokType::TStringString, "TMiddle"), - (TokType::Op, "{"), - (TokType::FStringStart, "f'"), - (TokType::FStringString, "FMiddle"), - (TokType::Op, "{"), - (TokType::TStringStart, "t'"), - (TokType::Op, "{"), - (TokType::Number, "2"), - (TokType::Op, "}"), - (TokType::TStringEnd, "'"), - (TokType::Op, "}"), - (TokType::FStringEnd, "'"), - (TokType::Op, "}"), - (TokType::TStringEnd, "\"") - ]) - ) -} diff --git a/native/libcst/src/tokenizer/text_position/char_width.rs b/native/libcst/src/tokenizer/text_position/char_width.rs index 10c65a41..879e1283 100644 --- a/native/libcst/src/tokenizer/text_position/char_width.rs +++ b/native/libcst/src/tokenizer/text_position/char_width.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. diff --git a/native/libcst/src/tokenizer/text_position/mod.rs b/native/libcst/src/tokenizer/text_position/mod.rs index 42a7b682..f94d7311 100644 --- a/native/libcst/src/tokenizer/text_position/mod.rs +++ b/native/libcst/src/tokenizer/text_position/mod.rs @@ -1,16 +1,19 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree.
 
 mod char_width;
 
+use once_cell::sync::Lazy;
 use regex::Regex;
 use std::fmt;
 
 use crate::tokenizer::debug_utils::EllipsisDebug;
 use char_width::NewlineNormalizedCharWidths;
 
+static CR_OR_LF_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[\r\n]").expect("regex"));
+
 pub trait TextPattern {
     fn match_len(&self, text: &str) -> Option<usize>;
 }
@@ -95,7 +98,7 @@ impl<'t> TextPosition<'t> {
         match match_len {
             Some(match_len) => {
                 assert!(
-                    !rest_of_text[..match_len].contains(|x| x == '\r' || x == '\n'),
+                    !CR_OR_LF_RE.is_match(&rest_of_text[..match_len]),
                     "matches pattern must not match a newline",
                 );
                 true
@@ -114,10 +117,6 @@ impl<'t> TextPosition<'t> {
                .inner_char_column_number
                .checked_sub(1)
                .expect("cannot back up past the beginning of a line.");
-            self.inner_byte_column_number = self
-                .inner_byte_column_number
-                .checked_sub(cw.byte_width)
-                .expect("cannot back up past the beginning of a line.");
             self.inner_byte_idx -= cw.byte_width;
         } else {
             panic!("Tried to backup past the beginning of the text.")
@@ -218,7 +217,6 @@ impl fmt::Debug for TextPosition<'_> {
            .field("char_widths", &EllipsisDebug)
            .field("inner_byte_idx", &self.inner_byte_idx)
            .field("inner_char_column_number", &self.inner_char_column_number)
-            .field("inner_byte_column_number", &self.inner_byte_column_number)
            .field("inner_line_number", &self.inner_line_number)
            .finish()
     }
diff --git a/native/libcst/src/tokenizer/whitespace_parser.rs b/native/libcst/src/tokenizer/whitespace_parser.rs
index 3f96bab2..ec463f99 100644
--- a/native/libcst/src/tokenizer/whitespace_parser.rs
+++ b/native/libcst/src/tokenizer/whitespace_parser.rs
@@ -1,4 +1,4 @@
-// Copyright (c) Meta Platforms, Inc. and affiliates.
+// Copyright (c) Meta Platforms, Inc. and its affiliates.
 //
 // This source code is licensed under the MIT license found in the
 // LICENSE file in the root directory of this source tree
@@ -7,13 +7,19 @@ use crate::nodes::{
     Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace,
     SimpleWhitespace, TrailingWhitespace,
 };
-use memchr::{memchr2, memchr2_iter};
+use once_cell::sync::Lazy;
+use regex::Regex;
 use thiserror::Error;
 
 use crate::Token;
 
 use super::TokType;
 
+static SIMPLE_WHITESPACE_RE: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r"\A([ \f\t]|\\(\r\n?|\n))*").expect("regex"));
+static NEWLINE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A(\r\n?|\n)").expect("regex"));
+static COMMENT_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\A#[^\r\n]*").expect("regex"));
+
 #[allow(clippy::upper_case_acronyms, clippy::enum_variant_names)]
 #[derive(Error, Debug, PartialEq, Eq)]
 pub enum WhitespaceError {
@@ -67,44 +73,15 @@ impl<'a> Config<'a> {
                 break;
             }
         }
-
-        let mut lines = Vec::new();
-        let mut start = 0;
-        let mut newline_positions = memchr2_iter(b'\n', b'\r', input.as_bytes());
-
-        while let Some(newline_position) = newline_positions.next() {
-            let newline_character = input.as_bytes()[newline_position] as char;
-
-            let len = if newline_character == '\r'
-                && input.as_bytes().get(newline_position + 1) == Some(&b'\n')
-            {
-                // Skip the next '\n'
-                newline_positions.next();
-                2
-            } else {
-                1
-            };
-
-            let end = newline_position + len;
-            lines.push(&input[start..end]);
-            start = end;
-        }
-
-        // Push the last line if it isn't terminated by a newline character
-        if start < input.len() {
-            lines.push(&input[start..]);
-        }
-
-        let default_newline = match lines.first().map(|line| line.as_bytes()).unwrap_or(&[]) {
-            [.., b'\r', b'\n'] => "\r\n",
-            [.., b'\n'] => "\n",
-            [.., b'\r'] => "\r",
-            _ => "\n",
-        };
+        let default_newline = Regex::new(r"\r\n?|\n")
+            .expect("regex")
+            .find(input)
+            .map(|m| m.as_str())
+            .unwrap_or("\n");
 
         Self {
             input,
-            lines,
+            lines: input.split_inclusive(default_newline).collect(),
             default_newline,
             default_indent,
         }
@@ -223,35 +200,32 @@ pub fn parse_empty_lines<'a>(
 }
 
 pub fn parse_comment<'a>(config: &Config<'a>, state: &mut State) -> Result<Option<Comment<'a>>> {
-    let newline_after = config.get_line_after_column(state.line, state.column_byte)?;
-    if newline_after.as_bytes().first() != Some(&b'#') {
-        return Ok(None);
+    if let Some(comment_match) =
+        COMMENT_RE.find(config.get_line_after_column(state.line, state.column_byte)?)
+    {
+        let comment_str = comment_match.as_str();
+        advance_this_line(
+            config,
+            state,
+            comment_str.chars().count(),
+            comment_str.len(),
+        )?;
+        return Ok(Some(Comment(comment_str)));
     }
-    let comment_str = if let Some(idx) = memchr2(b'\n', b'\r', newline_after.as_bytes()) {
-        &newline_after[..idx]
-    } else {
-        newline_after
-    };
-    advance_this_line(
-        config,
-        state,
-        comment_str.chars().count(),
-        comment_str.len(),
-    )?;
-    Ok(Some(Comment(comment_str)))
+    Ok(None)
 }
 
 pub fn parse_newline<'a>(config: &Config<'a>, state: &mut State) -> Result<Option<Newline<'a>>> {
-    let newline_after = config.get_line_after_column(state.line, state.column_byte)?;
-    let len = match newline_after.as_bytes() {
-        [b'\n', ..] => 1,
-        [b'\r', b'\n', ..] => 2,
-        [b'\r', ..] => 1,
-        _ => 0,
-    };
-    if len > 0 {
-        let newline_str = &newline_after[..len];
-        advance_this_line(config, state, len, len)?;
+    if let Some(newline_match) =
+        NEWLINE_RE.find(config.get_line_after_column(state.line, state.column_byte)?)
+ { + let newline_str = newline_match.as_str(); + advance_this_line( + config, + state, + newline_str.chars().count(), + newline_str.len(), + )?; if state.column_byte != config.get_line(state.line)?.len() { return Err(WhitespaceError::InternalError(format!( "Found newline at ({}, {}) but it's not EOL", @@ -374,18 +348,12 @@ pub fn parse_simple_whitespace<'a>( state: &mut State, ) -> Result> { let capture_ws = |line, col| -> Result<&'a str> { - let line = config.get_line_after_column(line, col)?; - let bytes = line.as_bytes(); - let mut idx = 0; - while idx < bytes.len() { - match bytes[idx..] { - [b' ' | b'\t' | b'\x0c', ..] => idx += 1, - [b'\\', b'\r', b'\n', ..] => idx += 3, - [b'\\', b'\r' | b'\n', ..] => idx += 2, - _ => break, - } - } - Ok(&line[..idx]) + let x = config.get_line_after_column(line, col); + let x = x?; + Ok(SIMPLE_WHITESPACE_RE + .find(x) + .expect("SIMPLE_WHITESPACE_RE supports 0-length matches, so it must always match") + .as_str()) }; let start_offset = state.byte_offset; let mut prev_line: &str; @@ -436,92 +404,3 @@ pub fn parse_parenthesized_whitespace<'a>( Ok(None) } } - -#[cfg(test)] -mod tests { - use crate::{tokenize, Comment, Config, Result, SimpleWhitespace}; - - use super::{parse_comment, parse_simple_whitespace}; - - #[test] - fn config_mixed_newlines() -> Result<'static, ()> { - let source = "'' % {\n'test1': '',\r 'test2': '',\r\n}"; - let tokens = tokenize(source)?; - - let config = Config::new(source, &tokens); - - assert_eq!( - &config.lines, - &["'' % {\n", "'test1': '',\r", " 'test2': '',\r\n", "}"] - ); - - Ok(()) - } - - fn _parse_simple_whitespace(src: &str) -> Result { - let tokens = tokenize(src)?; - let config = Config::new(src, &tokens); - let mut state = Default::default(); - Ok(parse_simple_whitespace(&config, &mut state)?) - } - - #[test] - fn simple_whitespace_line_continuations() -> Result<'static, ()> { - assert_eq!( - _parse_simple_whitespace(" \\\n # foo")?, - SimpleWhitespace(" \\\n ") - ); - - assert_eq!( - _parse_simple_whitespace(" \\\r # foo")?, - SimpleWhitespace(" \\\r ") - ); - assert_eq!( - _parse_simple_whitespace(" \\\r\n # foo")?, - SimpleWhitespace(" \\\r\n ") - ); - - assert_eq!( - _parse_simple_whitespace(" \\\r\n\\\n # foo")?, - SimpleWhitespace(" \\\r\n\\\n ") - ); - - Ok(()) - } - - #[test] - fn simple_whitespace_mixed() -> Result<'static, ()> { - assert_eq!( - _parse_simple_whitespace(" \t\x0clol")?, - SimpleWhitespace(" \t\x0c"), - ); - - Ok(()) - } - - fn _parse_comment(src: &str) -> Result> { - let tokens = tokenize(src)?; - let config = Config::new(src, &tokens); - let mut state = Default::default(); - Ok(parse_comment(&config, &mut state)?) 
- } - - #[test] - fn single_comment() -> Result<'static, ()> { - assert_eq!(_parse_comment("# foo\n# bar")?, Some(Comment("# foo"))); - Ok(()) - } - - #[test] - fn comment_until_eof() -> Result<'static, ()> { - assert_eq!(_parse_comment("#")?, Some(Comment("#"))); - Ok(()) - } - - #[test] - fn no_comment() -> Result<'static, ()> { - assert_eq!(_parse_comment("foo")?, None); - assert_eq!(_parse_comment("\n")?, None); - Ok(()) - } -} diff --git a/native/libcst/tests/.gitattributes b/native/libcst/tests/.gitattributes deleted file mode 100644 index e50e9b7b..00000000 --- a/native/libcst/tests/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -fixtures/mixed_newlines.py autocrlf=false \ No newline at end of file diff --git a/native/libcst/tests/fixtures/big_binary_operator.py b/native/libcst/tests/fixtures/big_binary_operator.py deleted file mode 100644 index 2ab0d65c..00000000 --- a/native/libcst/tests/fixtures/big_binary_operator.py +++ /dev/null @@ -1,32 +0,0 @@ -( # 350 binary operators lets go - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' + - 'X' + 'Y' + 'Z' + 'Q' + 'T' -) diff --git a/native/libcst/tests/fixtures/expr.py b/native/libcst/tests/fixtures/expr.py index abb78ab9..c1c4e9b7 100644 --- a/native/libcst/tests/fixtures/expr.py +++ b/native/libcst/tests/fixtures/expr.py @@ -44,7 +44,6 @@ lambda a, b, c=True, *vararg, d=(v1 << 2), e='str', **kwargs : a + b manylambdas = lambda x=lambda y=lambda z=1: z: y(): x() foo = (lambda port_id, ignore_missing: {"port1": port1_resource, "port2": port2_resource}[port_id]) 1 if True else 2 -_ if 0else _ str or None if True else str or bytes or None (str or None) if True else (str or bytes or None) str or None if (1 if True else 2) else str or bytes or None diff --git a/native/libcst/tests/fixtures/fun_with_func_defs.py b/native/libcst/tests/fixtures/fun_with_func_defs.py index 9e048a75..1a78d687 100644 --- a/native/libcst/tests/fixtures/fun_with_func_defs.py +++ b/native/libcst/tests/fixtures/fun_with_func_defs.py @@ -1,6 +1,4 @@ def f(a, /,): pass -def f(a, / ,): pass -def f(a, / ): pass def f(a, /, c, d, e): pass def f(a, /, c, *, d, e): pass def f(a, /, c, *, d, e, **kwargs): pass @@ -26,10 +24,6 @@ def f(a, /, c, d, e): def f(a, /, c, *, d, e): pass -def foo(a, * - , bar): - pass - def f( a, @@ -49,11 +43,6 @@ def f( ): pass -def say_hello( - self, user: str, / -): - print('Hello ' + user) - def f(a=1, /, b=2, c=4): pass diff --git a/native/libcst/tests/fixtures/malicious_match.py b/native/libcst/tests/fixtures/malicious_match.py index 54840022..cabf4689 100644 --- a/native/libcst/tests/fixtures/malicious_match.py +++ b/native/libcst/tests/fixtures/malicious_match.py @@ -36,7 +36,4 @@ 
match x: case Foo | Bar | ( Baz): pass case x,y , * more :pass case y.z: pass - case 1, 2: pass - case ( Foo ( ) ) : pass - case (lol) if ( True , ) :pass diff --git a/native/libcst/tests/fixtures/mixed_newlines.py b/native/libcst/tests/fixtures/mixed_newlines.py deleted file mode 100644 index 935a8b45..00000000 --- a/native/libcst/tests/fixtures/mixed_newlines.py +++ /dev/null @@ -1,3 +0,0 @@ -"" % { - 'test1': '', 'test2': '', -} diff --git a/native/libcst/tests/fixtures/pep646.py b/native/libcst/tests/fixtures/pep646.py deleted file mode 100644 index 6af0e6f1..00000000 --- a/native/libcst/tests/fixtures/pep646.py +++ /dev/null @@ -1,37 +0,0 @@ -# see https://github.com/python/cpython/pull/31018/files#diff-3f516b60719dd445d33225e4f316b36e85c9c51a843a0147349d11a005c55937 - -A[*b] -A[ * b ] -A[ * b , ] -A[*b] = 1 -del A[*b] - -A[* b , * b] -A[ b, *b] -A[* b, b] -A[ * b,b, b] -A[b, *b, b] - -A[*A[b, *b, b], b] -A[b, ...] -A[*A[b, ...]] - -A[ * ( 1,2,3)] -A[ * [ 1,2,3]] - -A[1:2, *t] -A[1:, *t, 1:2] -A[:, *t, :] -A[*t, :, *t] - -A[* returns_list()] -A[*returns_list(), * returns_list(), b] - -def f1(*args: *b): pass -def f2(*args: *b, arg1): pass -def f3(*args: *b, arg1: int): pass -def f4(*args: *b, arg1: int = 1): pass - -def f(*args: *tuple[int, ...]): pass -def f(*args: *tuple[int, *Ts]): pass -def f() -> tuple[int, *tuple[int, ...]]: pass \ No newline at end of file diff --git a/native/libcst/tests/fixtures/super_strings.py b/native/libcst/tests/fixtures/super_strings.py index 369270ab..76b1b2dd 100644 --- a/native/libcst/tests/fixtures/super_strings.py +++ b/native/libcst/tests/fixtures/super_strings.py @@ -26,33 +26,3 @@ _ = f"something {{**not** an expression}} {but(this._is)} {{and this isn't.}} en _(f"ok { expr = !r: aosidjhoi } end") print(f"{self.ERASE_CURRENT_LINE}{self._human_seconds(elapsed_time)} {percent:.{self.pretty_precision}f}% complete, {self.estimate_completion(elapsed_time, finished, left)} estimated for {left} files to go...") - -f"{"\n".join()}" - -f"___{ - x -}___" - -f"___{( - x -)}___" - -f'\{{\}}' -f"regexp_like(path, '.*\{file_type}$')" -f"\lfoo" - -f"{_:{_:}{a}}" - -f"foo {f"bar {x}"} baz" -f'some words {a+b:.3f} more words {c+d=} final words' -f"{'':*^{1:{1}}}" -f"{'':*^{1:{1:{1}}}}" -f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}" - - -t'Nothing to see here, move along' -t"User {action}: {amount:.2f} {item}" -t"

HTML is code {too}

" -t"value={value!r}" -t"This wrinkles my brain {value:.{precision}f}" -_ = t"everything" + t" is {tstrings}" diff --git a/native/libcst/tests/fixtures/terrible_tries.py b/native/libcst/tests/fixtures/terrible_tries.py index eb5429cc..91d6831e 100644 --- a/native/libcst/tests/fixtures/terrible_tries.py +++ b/native/libcst/tests/fixtures/terrible_tries.py @@ -69,25 +69,3 @@ except foo: pass #9 - -try: - pass -except (foo, bar): - pass - -try: - pass -except foo, bar: - pass - -try: - pass -except (foo, bar), baz: - pass -else: - pass - -try: - pass -except* something, somethingelse: - pass \ No newline at end of file diff --git a/native/libcst/tests/fixtures/trailing_whitespace.py b/native/libcst/tests/fixtures/trailing_whitespace.py deleted file mode 100644 index 5a01c197..00000000 --- a/native/libcst/tests/fixtures/trailing_whitespace.py +++ /dev/null @@ -1,5 +0,0 @@ - - -x = 42 -print(x) - \ No newline at end of file diff --git a/native/libcst/tests/fixtures/tuple_shenanigans.py b/native/libcst/tests/fixtures/tuple_shenanigans.py index 136d79d2..f31c6452 100644 --- a/native/libcst/tests/fixtures/tuple_shenanigans.py +++ b/native/libcst/tests/fixtures/tuple_shenanigans.py @@ -4,8 +4,6 @@ # alright here we go. () -(()) -(((())), ()) ( # evil >:) # evil >:( ) # ... diff --git a/native/libcst/tests/fixtures/type_parameters.py b/native/libcst/tests/fixtures/type_parameters.py deleted file mode 100644 index ef6a39eb..00000000 --- a/native/libcst/tests/fixtures/type_parameters.py +++ /dev/null @@ -1,69 +0,0 @@ -# fmt: off - -type TA = int - -type TA1[A] = lambda A: A - -class Outer[A]: - type TA1[A] = None - -type TA1[A, B] = dict[A, B] - -class Outer[A]: - def inner[B](self): - type TA1[C] = TA1[A, B] | int - return TA1 - -def more_generic[T, *Ts, **P](): - type TA[T2, *Ts2, **P2] = tuple[Callable[P, tuple[T, *Ts]], Callable[P2, tuple[T2, *Ts2]]] - return TA - -type Recursive = Recursive - -def func[A](A): return A - -class ClassA: - def func[__A](self, __A): return __A - -class ClassA[A, B](dict[A, B]): - ... - -class ClassA[A]: - def funcB[B](self): - class ClassC[C]: - def funcD[D](self): - return lambda: (A, B, C, D) - return ClassC - -class Child[T](Base[lambda: (int, outer_var, T)]): ... 
- -type Alias[T: ([T for T in (T, [1])[1]], T)] = [T for T in T.__name__] -type Alias[T: [lambda: T for T in (T, [1])[1]]] = [lambda: T for T in T.__name__] - -class Foo[T: Foo, U: (Foo, Foo)]: - pass - -def func[T](a: T = "a", *, b: T = "b"): - return (a, b) - -def func1[A: str, B: str | int, C: (int, str)](): - return (A, B, C) - -type A [ T , * V ] =foo;type B=A - -def AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] ():pass -class AAAAAAAAAAAAAAAAAA [ T : int ,*Ts , ** TT ] :pass - -def yikes[A:int,*B,**C](*d:*tuple[A,*B,...])->A:pass - -def func[T=int, **U=float, *V=None](): pass - -class C[T=int, **U=float, *V=None]: pass - -type Alias[T = int, **U = float, *V = None] = int - -default = tuple[int, str] -type Alias[*Ts = *default] = Ts -type Foo[ * T = * default ] = int -type Foo[*T=*default ]=int -type Foo [ * T = * default ] = int \ No newline at end of file diff --git a/native/libcst/tests/fixtures/with_wickedness.py b/native/libcst/tests/fixtures/with_wickedness.py index ee6ff7b6..7cb5c67d 100644 --- a/native/libcst/tests/fixtures/with_wickedness.py +++ b/native/libcst/tests/fixtures/with_wickedness.py @@ -1,52 +1,13 @@ # with_wickedness -with foo : - pass - -with foo, bar: - pass - -with (foo, bar): - pass - -with (foo, bar,): - pass - -with foo, bar as bar: - pass - -with (foo, bar as bar): - pass - -with (foo, bar as bar,): - pass +with foo : ... async def f(): - async with foo: + async with foo as bar: with bar: pass - async with foo : - pass - - async with foo, bar: - pass - - async with (foo, bar): - pass - - async with (foo, bar,): - pass - - async with foo, bar as bar: - pass - - async with (foo, bar as bar): - pass - - async with (foo, bar as bar,): - pass - async with foo(1+1) as bar , 1 as (a, b, ) , 2 as [a, b] , 3 as a[b] : pass + diff --git a/native/libcst/tests/fixtures/wonky_walrus.py b/native/libcst/tests/fixtures/wonky_walrus.py index d506b169..d0916ab8 100644 --- a/native/libcst/tests/fixtures/wonky_walrus.py +++ b/native/libcst/tests/fixtures/wonky_walrus.py @@ -10,6 +10,4 @@ while f := x(): if f := x(): pass f(y:=1) -f(x, y := 1 ) - -_[_:=10] \ No newline at end of file +f(x, y := 1 ) \ No newline at end of file diff --git a/native/libcst/tests/parser_roundtrip.rs b/native/libcst/tests/parser_roundtrip.rs index 7618eaec..b16a46f0 100644 --- a/native/libcst/tests/parser_roundtrip.rs +++ b/native/libcst/tests/parser_roundtrip.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/Cargo.toml b/native/libcst_derive/Cargo.toml index bf9959ab..95bf4d2d 100644 --- a/native/libcst_derive/Cargo.toml +++ b/native/libcst_derive/Cargo.toml @@ -1,19 +1,11 @@ [package] name = "libcst_derive" -version = "1.8.6" +version = "0.1.0" edition = "2018" -description = "Proc macro helpers for libcst." -license = "MIT" -repository = "https://github.com/Instagram/LibCST" -documentation = "https://libcst.rtfd.org" -keywords = ["macros", "python"] [lib] proc-macro = true [dependencies] -syn = "2.0" +syn = "1.0" quote = "1.0" - -[dev-dependencies] -trybuild = "1.0" diff --git a/native/libcst_derive/LICENSE b/native/libcst_derive/LICENSE deleted file mode 100644 index 5594616f..00000000 --- a/native/libcst_derive/LICENSE +++ /dev/null @@ -1,102 +0,0 @@ -All contributions towards LibCST are MIT licensed. 
- -Some Python files have been derived from the standard library and are therefore -PSF licensed. Modifications on these files are dual licensed (both MIT and -PSF). These files are: - -- libcst/_parser/base_parser.py -- libcst/_parser/parso/utils.py -- libcst/_parser/parso/pgen2/generator.py -- libcst/_parser/parso/pgen2/grammar_parser.py -- libcst/_parser/parso/python/py_token.py -- libcst/_parser/parso/python/tokenize.py -- libcst/_parser/parso/tests/test_fstring.py -- libcst/_parser/parso/tests/test_tokenize.py -- libcst/_parser/parso/tests/test_utils.py -- native/libcst/src/tokenizer/core/mod.rs -- native/libcst/src/tokenizer/core/string_types.rs - -Some Python files have been taken from dataclasses and are therefore Apache -licensed. Modifications on these files are licensed under Apache 2.0 license. -These files are: - -- libcst/_add_slots.py - -------------------------------------------------------------------------------- - -MIT License - -Copyright (c) Meta Platforms, Inc. and affiliates. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -------------------------------------------------------------------------------- - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" -are retained in Python alone or in any derivative version prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - -------------------------------------------------------------------------------- - -APACHE LICENSE, VERSION 2.0 - -http://www.apache.org/licenses/LICENSE-2.0 diff --git a/native/libcst_derive/src/codegen.rs b/native/libcst_derive/src/codegen.rs index 7dee8adc..4fce05df 100644 --- a/native/libcst_derive/src/codegen.rs +++ b/native/libcst_derive/src/codegen.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree diff --git a/native/libcst_derive/src/cstnode.rs b/native/libcst_derive/src/cstnode.rs deleted file mode 100644 index cdb6947e..00000000 --- a/native/libcst_derive/src/cstnode.rs +++ /dev/null @@ -1,447 +0,0 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree - -use proc_macro::TokenStream; -use quote::{format_ident, quote, quote_spanned, ToTokens}; -use syn::{ - self, - parse::{Parse, ParseStream}, - parse_quote, - punctuated::{Pair, Punctuated}, - spanned::Spanned, - token::Comma, - AngleBracketedGenericArguments, Attribute, Data, DataEnum, DataStruct, DeriveInput, Field, - Fields, FieldsNamed, FieldsUnnamed, GenericArgument, Generics, Ident, Meta, Path, - PathArguments, PathSegment, Token, Type, TypePath, Visibility, -}; - -pub(crate) struct CSTNodeParams { - traits: Punctuated, -} - -#[derive(PartialEq, Eq)] -enum SupportedTrait { - ParenthesizedNode, - Codegen, - Inflate, - NoIntoPy, - Default, -} - -pub(crate) fn impl_cst_node(ast: DeriveInput, args: CSTNodeParams) -> TokenStream { - match ast.data { - Data::Enum(e) => impl_enum(args, ast.attrs, ast.vis, ast.ident, ast.generics, e), - Data::Struct(s) => impl_struct(args, ast.attrs, ast.vis, ast.ident, ast.generics, s), - Data::Union(u) => quote_spanned! 
{ - u.union_token.span() => - compile_error!("Union type is not supported") - } - .into(), - } -} - -impl CSTNodeParams { - fn has_trait(&self, treyt: &SupportedTrait) -> bool { - self.traits.iter().any(|x| x == treyt) - } -} - -impl Parse for SupportedTrait { - fn parse(input: ParseStream) -> syn::Result<Self> { - if input.peek(Ident) { - let id: Ident = input.parse()?; - return match id.to_string().as_str() { - "ParenthesizedNode" => Ok(Self::ParenthesizedNode), - "Codegen" => Ok(Self::Codegen), - "Inflate" => Ok(Self::Inflate), - "NoIntoPy" => Ok(Self::NoIntoPy), - "Default" => Ok(Self::Default), - _ => Err(input.error("Not a supported trait to derive for cst_node")), - }; - } - Err(input.error("Pass in trait names to be derived")) - } -} - -impl Parse for CSTNodeParams { - fn parse(input: ParseStream) -> syn::Result<Self> { - Ok(Self { - traits: input.parse_terminated(SupportedTrait::parse, Token![,])?, - }) - } -} - -// enum Foo<'a> { -// Variant(Box<Bar<'a>>), -// } -// => -// enum Foo<'a> { -// Variant(Box<Bar<'a>>), -// } -// enum DeflatedFoo<'r, 'a> { -// Variant(Box<DeflatedBar<'r, 'a>>), -// } - -fn impl_enum( - args: CSTNodeParams, - mut attrs: Vec<Attribute>, - vis: Visibility, - ident: Ident, - generics: Generics, - mut e: DataEnum, -) -> TokenStream { - let deflated_vis = vis.clone(); - let deflated_ident = format_ident!("Deflated{}", &ident); - let deflated_generics: Generics = parse_quote!(<'r, 'a>); - let mut deflated_variant_tokens = vec![]; - - for var in e.variants.iter_mut() { - let (inflated_fields, deflated_fields) = impl_fields(var.fields.clone()); - var.fields = deflated_fields; - deflated_variant_tokens.push(var.to_token_stream()); - var.fields = inflated_fields; - } - add_inflated_attrs(&args, &mut attrs); - let inflated = DeriveInput { - attrs, - vis, - ident, - generics, - data: Data::Enum(e), - }; - - let deflated_attrs = get_deflated_attrs(&args); - - let gen = quote!
{ - #[derive(Debug, PartialEq, Eq, Clone)] - #inflated - - #[derive(Debug, PartialEq, Eq, Clone)] - #(#deflated_attrs)* - #deflated_vis enum #deflated_ident#deflated_generics { - #(#deflated_variant_tokens,)* - } - }; - gen.into() -} - -fn get_deflated_attrs(args: &CSTNodeParams) -> Vec { - let mut deflated_attrs: Vec = vec![]; - if args.has_trait(&SupportedTrait::Inflate) { - deflated_attrs.push(parse_quote!(#[derive(Inflate)])); - } - if args.has_trait(&SupportedTrait::ParenthesizedNode) { - deflated_attrs.push(parse_quote!(#[derive(ParenthesizedDeflatedNode)])) - } - if args.has_trait(&SupportedTrait::Default) { - deflated_attrs.push(parse_quote!(#[derive(Default)])); - } - deflated_attrs -} - -fn add_inflated_attrs(args: &CSTNodeParams, attrs: &mut Vec) { - if args.has_trait(&SupportedTrait::Codegen) { - attrs.push(parse_quote!(#[derive(Codegen)])); - } - if args.has_trait(&SupportedTrait::ParenthesizedNode) { - attrs.push(parse_quote!(#[derive(ParenthesizedNode)])); - } - if args.has_trait(&SupportedTrait::Default) { - attrs.push(parse_quote!(#[derive(Default)])); - } - if !args.has_trait(&SupportedTrait::NoIntoPy) { - attrs.push(parse_quote!(#[cfg_attr(feature = "py", derive(TryIntoPy))])); - } -} - -// pub struct Foo<'a> { -// pub bar: Bar<'a>, -// pub value: &'a str, -// pub whitespace_after: SimpleWhitespace<'a>, -// pub(crate) tok: Option, -// } -// => -// pub struct Foo<'a> { -// pub bar: Bar<'a>, -// pub value: &'a str, -// pub whitespace_after: SimpleWhitespace<'a>, -// } -// struct DeflatedFoo<'r, 'a> { -// pub bar: DeflatedBar<'r, 'a>, -// pub value: &'a str, -// pub tok: Option> -// } - -fn impl_struct( - args: CSTNodeParams, - mut attrs: Vec, - vis: Visibility, - ident: Ident, - generics: Generics, - mut s: DataStruct, -) -> TokenStream { - let deflated_vis = vis.clone(); - let deflated_ident = format_ident!("Deflated{}", &ident); - let deflated_generics: Generics = parse_quote!(<'r, 'a>); - - let (inflated_fields, deflated_fields) = impl_fields(s.fields); - s.fields = inflated_fields; - - add_inflated_attrs(&args, &mut attrs); - - let inflated = DeriveInput { - attrs, - vis, - ident, - generics, - data: Data::Struct(s), - }; - - let deflated_attrs = get_deflated_attrs(&args); - - let gen = quote! { - #[derive(Debug, PartialEq, Eq, Clone)] - #inflated - - #[derive(Debug, PartialEq, Eq, Clone)] - #(#deflated_attrs)* - #deflated_vis struct #deflated_ident#deflated_generics - #deflated_fields - - }; - gen.into() -} - -fn impl_fields(fields: Fields) -> (Fields, Fields) { - match &fields { - Fields::Unnamed(fs) => { - let deflated_fields = impl_unnamed_fields(fs.clone()); - (fields, Fields::Unnamed(deflated_fields)) - } - Fields::Named(fs) => impl_named_fields(fs.clone()), - Fields::Unit => (Fields::Unit, Fields::Unit), - } -} - -fn impl_unnamed_fields(mut deflated_fields: FieldsUnnamed) -> FieldsUnnamed { - let mut added_lifetime = false; - deflated_fields.unnamed = deflated_fields - .unnamed - .into_pairs() - .map(|pair| { - let (deflated, lifetime) = make_into_deflated(pair); - added_lifetime |= lifetime; - deflated - }) - .collect(); - - // Make sure all Deflated* types have 'r 'a lifetime params - if !added_lifetime { - deflated_fields.unnamed.push(parse_quote! 
{ - std::marker::PhantomData<&'r &'a ()> - }); - } - deflated_fields -} - -fn impl_named_fields(mut fields: FieldsNamed) -> (Fields, Fields) { - let mut deflated_fields = fields.clone(); - let mut added_lifetime = false; - // Drop whitespace fields from deflated fields - // And add lifetimes to tokenref fields - deflated_fields.named = deflated_fields - .named - .into_pairs() - .filter(|pair| { - let id = pair.value().ident.as_ref().unwrap().to_string(); - !id.contains("whitespace") - && id != "footer" - && id != "header" - && id != "leading_lines" - && id != "lines_after_decorators" - }) - .map(|pair| { - if is_builtin(pair.value()) { - pair - } else { - let (deflated, lifetime) = make_into_deflated(pair); - added_lifetime |= lifetime; - deflated - } - }) - .map(|pair| { - let (mut val, punct) = pair.into_tuple(); - val.attrs = val.attrs.into_iter().filter(is_not_intopy_attr).collect(); - Pair::new(val, punct) - }) - .collect(); - - // Make sure all Deflated* types have 'r 'a lifetime params - if !added_lifetime { - deflated_fields.named.push(parse_quote! { - _phantom: std::marker::PhantomData<&'r &'a ()> - }); - } - - // Drop tokenref fields from inflated fields - fields.named = fields - .named - .into_pairs() - .filter(|pair| !is_token_ref(pair.value())) - .collect(); - - (Fields::Named(fields), Fields::Named(deflated_fields)) -} - -fn is_builtin(field: &Field) -> bool { - get_pathseg(&field.ty) - .map(|seg| { - let segstr = seg.ident.to_string(); - segstr == "str" || segstr == "bool" || segstr == "String" - }) - .unwrap_or_default() -} - -fn is_token_ref(field: &Field) -> bool { - if let Some(seg) = rightmost_path_segment(&field.ty) { - return format!("{}", seg.ident) == "TokenRef"; - } - false -} - -// foo::bar -> foo::Deflatedbar<'r, 'a> -fn make_into_deflated(mut pair: Pair) -> (Pair, bool) { - let mut added_lifetime = true; - if let Some(seg) = rightmost_path_segment_mut(&mut pair.value_mut().ty) { - let seg_name = seg.ident.to_string(); - if seg_name != "TokenRef" { - seg.ident = format_ident!("Deflated{}", seg_name); - } - match seg.arguments { - PathArguments::None => { - seg.arguments = PathArguments::AngleBracketed(parse_quote!(<'r, 'a>)); - } - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - ref mut args, .. - }) => { - args.insert(0, parse_quote!('r)); - } - _ => todo!(), - } - } else { - added_lifetime = false; - } - (pair, added_lifetime) -} - -// foo::bar::baz> -> baz> -fn get_pathseg(ty: &Type) -> Option<&PathSegment> { - match ty { - Type::Path(TypePath { path, .. }) => path.segments.last(), - _ => None, - } -} - -// foo::bar::baz> -> quux<'a> -fn rightmost_path_segment(ty: &Type) -> Option<&PathSegment> { - let mut candidate = get_pathseg(ty); - loop { - if let Some(pathseg) = candidate { - if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) = - &pathseg.arguments - { - if let Some(GenericArgument::Type(t)) = args.last() { - candidate = get_pathseg(t); - continue; - } - } - } - break; - } - candidate -} - -fn get_pathseg_mut(ty: &mut Type) -> Option<&mut PathSegment> { - match ty { - Type::Path(TypePath { path, .. }) => path.segments.last_mut(), - _ => None, - } -} - -fn has_more_mut(candidate: &Option<&mut PathSegment>) -> bool { - if let Some(PathArguments::AngleBracketed(AngleBracketedGenericArguments { - ref args, .. 
- })) = candidate.as_ref().map(|c| &c.arguments) - { - matches!(args.last(), Some(GenericArgument::Type(_))) - } else { - false - } -} - -fn rightmost_path_segment_mut(ty: &mut Type) -> Option<&mut PathSegment> { - let mut candidate = get_pathseg_mut(ty); - - while has_more_mut(&candidate) { - candidate = match candidate.unwrap().arguments { - PathArguments::AngleBracketed(AngleBracketedGenericArguments { - ref mut args, .. - }) => { - if let Some(GenericArgument::Type(t)) = args.last_mut() { - get_pathseg_mut(t) - } else { - unreachable!(); - } - } - _ => unreachable!(), - }; - } - - candidate -} - -fn is_not_intopy_attr(attr: &Attribute) -> bool { - let path = attr.path(); - // support #[cfg_attr(feature = "py", skip_py)] - if path.is_ident("cfg_attr") { - return match attr.parse_args_with(|input: ParseStream| { - let _: Meta = input.parse()?; - let _: Token![,] = input.parse()?; - let nested_path: Path = input.parse()?; - let _: Option = input.parse()?; - Ok(nested_path) - }) { - Ok(nested_path) => !is_intopy_attr_path(&nested_path), - Err(_) => false, - }; - } - !is_intopy_attr_path(path) -} - -fn is_intopy_attr_path(path: &Path) -> bool { - path.is_ident("skip_py") || path.is_ident("no_py_default") -} - -#[test] -fn trybuild() { - let t = trybuild::TestCases::new(); - t.pass("tests/pass/*.rs"); -} - -#[test] -fn test_is_not_intopy_attr() { - assert!(!is_not_intopy_attr(&parse_quote!(#[skip_py]))); - assert!(!is_not_intopy_attr(&parse_quote!(#[no_py_default]))); - assert!(!is_not_intopy_attr( - &parse_quote!(#[cfg_attr(foo="bar",skip_py)]) - )); - assert!(!is_not_intopy_attr( - &parse_quote!(#[cfg_attr(foo="bar",no_py_default)]) - )); - assert!(is_not_intopy_attr(&parse_quote!(#[skippy]))); - assert!(is_not_intopy_attr( - &parse_quote!(#[cfg_attr(foo="bar",skippy)]) - )); -} diff --git a/native/libcst_derive/src/inflate.rs b/native/libcst_derive/src/inflate.rs index 9a166bdc..323160c1 100644 --- a/native/libcst_derive/src/inflate.rs +++ b/native/libcst_derive/src/inflate.rs @@ -1,10 +1,10 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree use proc_macro::TokenStream; -use quote::{format_ident, quote, quote_spanned}; +use quote::{quote, quote_spanned}; use syn::{self, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; pub(crate) fn impl_inflate(ast: &DeriveInput) -> TokenStream { @@ -55,19 +55,11 @@ fn impl_inflate_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { } let ident = &ast.ident; let generics = &ast.generics; - let ident_str = ident.to_string(); - let inflated_ident = format_ident!( - "{}", - ident_str - .strip_prefix("Deflated") - .expect("Cannot implement Inflate on a non-Deflated item") - ); let gen = quote! 
{ - impl#generics Inflate<'a> for #ident #generics { - type Inflated = #inflated_ident <'a>; - fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result { + impl<'a> Inflate<'a> for #ident #generics { + fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result { match self { - #(Self::#varnames(x) => Ok(Self::Inflated::#varnames(x.inflate(config)?)),)* + #(Self::#varnames(x) => Ok(Self::#varnames(x.inflate(config)?)),)* } } } diff --git a/native/libcst_derive/src/into_py.rs b/native/libcst_derive/src/into_py.rs index f5470aa1..2ba4160c 100644 --- a/native/libcst_derive/src/into_py.rs +++ b/native/libcst_derive/src/into_py.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree @@ -38,15 +38,14 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let kwargs_toks = fields_to_kwargs(&var.fields, true); toks.push(quote! { Self::#varname { #(#fieldnames,)* .. } => { - use pyo3::types::PyAnyMethods; - - let libcst = pyo3::types::PyModule::import(py, "libcst")?; + let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); let kwargs = #kwargs_toks ; - Ok(libcst + libcst .getattr(stringify!(#varname)) .expect(stringify!(no #varname found in libcst)) - .call((), Some(&kwargs))? - .into()) + .call((), Some(kwargs)) + .expect(stringify!(conversion failed for #varname)) + .into() } }) } @@ -59,7 +58,7 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { } Fields::Unnamed(_) => { toks.push(quote! { - Self::#varname(x, ..) => x.try_into_py(py), + Self::#varname(x, ..) => x.into_py(py), }); } } @@ -69,8 +68,8 @@ fn impl_into_py_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let gen = quote! { use pyo3::types::IntoPyDict as _; #[automatically_derived] - impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { + impl#generics pyo3::conversion::IntoPy for #ident #generics { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { match self { #(#toks)* } @@ -87,16 +86,16 @@ fn impl_into_py_struct(ast: &DeriveInput, e: &DataStruct) -> TokenStream { let gen = quote! { use pyo3::types::IntoPyDict as _; #[automatically_derived] - impl#generics crate::nodes::traits::py::TryIntoPy for #ident #generics { - fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult { - use pyo3::types::PyAnyMethods; - let libcst = pyo3::types::PyModule::import(py, "libcst")?; + impl#generics pyo3::conversion::IntoPy for #ident #generics { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + let libcst = pyo3::types::PyModule::import(py, "libcst").expect("libcst couldn't be imported"); let kwargs = #kwargs_toks ; - Ok(libcst + libcst .getattr(stringify!(#ident)) .expect(stringify!(no #ident found in libcst)) - .call((), Some(&kwargs))? - .into()) + .call((), Some(kwargs)) + .expect(stringify!(conversion failed for #ident)) + .into() } } }; @@ -144,7 +143,7 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } } } - empty_kwargs = py_varnames.is_empty() && optional_py_varnames.is_empty() + empty_kwargs = py_varnames.is_empty() && optional_py_varnames.is_empty(); } Fields::Unnamed(FieldsUnnamed { unnamed, .. 
}) => { if unnamed.first().is_some() { @@ -159,10 +158,10 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt } }; let kwargs_pairs = quote! { - #(Some((stringify!(#py_varnames), #rust_varnames.try_into_py(py)?)),)* + #(Some((stringify!(#py_varnames), #rust_varnames.into_py(py))),)* }; let optional_pairs = quote! { - #(#optional_rust_varnames.map(|x| x.try_into_py(py)).transpose()?.map(|x| (stringify!(#optional_py_varnames), x)),)* + #(#optional_rust_varnames.map(|x| (stringify!(#optional_py_varnames), x.into_py(py))),)* }; if empty_kwargs { quote! { pyo3::types::PyDict::new(py) } @@ -173,11 +172,11 @@ fn fields_to_kwargs(fields: &Fields, is_enum: bool) -> quote::__private::TokenSt .filter(|x| x.is_some()) .map(|x| x.as_ref().unwrap()) .collect::>() - .into_py_dict(py)? + .into_py_dict(py) } } } fn has_attr(attrs: &[Attribute], name: &'static str) -> bool { - attrs.iter().any(|attr| attr.path().is_ident(name)) + attrs.iter().any(|attr| attr.path.is_ident(name)) } diff --git a/native/libcst_derive/src/lib.rs b/native/libcst_derive/src/lib.rs index b7aafeba..97d1e321 100644 --- a/native/libcst_derive/src/lib.rs +++ b/native/libcst_derive/src/lib.rs @@ -1,4 +1,4 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree @@ -11,11 +11,8 @@ mod codegen; use codegen::impl_codegen; mod into_py; use into_py::impl_into_py; -mod cstnode; -use cstnode::{impl_cst_node, CSTNodeParams}; use proc_macro::TokenStream; -use syn::{parse_macro_input, DeriveInput}; #[proc_macro_derive(Inflate)] pub fn inflate_derive(input: TokenStream) -> TokenStream { @@ -25,26 +22,15 @@ pub fn inflate_derive(input: TokenStream) -> TokenStream { #[proc_macro_derive(ParenthesizedNode)] pub fn parenthesized_node_derive(input: TokenStream) -> TokenStream { - impl_parenthesized_node(&syn::parse(input).unwrap(), false) -} - -#[proc_macro_derive(ParenthesizedDeflatedNode)] -pub fn parenthesized_deflated_node_derive(input: TokenStream) -> TokenStream { - impl_parenthesized_node(&syn::parse(input).unwrap(), true) + impl_parenthesized_node(&syn::parse(input).unwrap()) } #[proc_macro_derive(Codegen)] -pub fn codegen_derive(input: TokenStream) -> TokenStream { +pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream { impl_codegen(&syn::parse(input).unwrap()) } -#[proc_macro_derive(TryIntoPy, attributes(skip_py, no_py_default))] +#[proc_macro_derive(IntoPy, attributes(skip_py, no_py_default))] pub fn into_py(input: TokenStream) -> TokenStream { impl_into_py(&syn::parse(input).unwrap()) } - -#[proc_macro_attribute] -pub fn cst_node(args: TokenStream, input: TokenStream) -> TokenStream { - let args = parse_macro_input!(args as CSTNodeParams); - impl_cst_node(parse_macro_input!(input as DeriveInput), args) -} diff --git a/native/libcst_derive/src/parenthesized_node.rs b/native/libcst_derive/src/parenthesized_node.rs index edc4b380..fe716510 100644 --- a/native/libcst_derive/src/parenthesized_node.rs +++ b/native/libcst_derive/src/parenthesized_node.rs @@ -1,18 +1,16 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. +// Copyright (c) Meta Platforms, Inc. and its affiliates. 
// // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree use proc_macro::TokenStream; use quote::{quote, quote_spanned}; -use syn::{ - parse_quote, spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed, Ident, -}; +use syn::{spanned::Spanned, Data, DataEnum, DeriveInput, Fields, FieldsUnnamed}; -pub(crate) fn impl_parenthesized_node(ast: &DeriveInput, deflated: bool) -> TokenStream { +pub(crate) fn impl_parenthesized_node(ast: &DeriveInput) -> TokenStream { match &ast.data { - Data::Enum(e) => impl_enum(ast, e, deflated), - Data::Struct(_) => impl_struct(ast, deflated), + Data::Enum(e) => impl_enum(ast, e), + Data::Struct(_) => impl_struct(ast), Data::Union(u) => quote_spanned! { u.union_token.span() => compile_error!("Union type is not supported") @@ -21,43 +19,18 @@ pub(crate) fn impl_parenthesized_node(ast: &DeriveInput, deflated: bool) -> Toke } } -fn idents(deflated: bool) -> (Ident, Ident, Ident) { - let treyt: Ident = if deflated { - parse_quote!(ParenthesizedDeflatedNode) - } else { - parse_quote!(ParenthesizedNode) - }; - let leftparen: Ident = if deflated { - parse_quote!(DeflatedLeftParen) - } else { - parse_quote!(LeftParen) - }; - let rightparen: Ident = if deflated { - parse_quote!(DeflatedRightParen) - } else { - parse_quote!(RightParen) - }; - (treyt, leftparen, rightparen) -} - -fn impl_struct(ast: &DeriveInput, deflated: bool) -> TokenStream { +fn impl_struct(ast: &DeriveInput) -> TokenStream { let ident = &ast.ident; - let generics = if deflated { - parse_quote!(<'r, 'a>) - } else { - ast.generics.clone() - }; - - let (treyt, leftparen, rightparen) = idents(deflated); + let generics = &ast.generics; let gen = quote! { - impl#generics #treyt#generics for #ident #generics { - fn lpar(&self) -> &Vec<#leftparen#generics> { + impl<'a> ParenthesizedNode<'a> for #ident #generics { + fn lpar(&self) -> &Vec<LeftParen<'a>> { &self.lpar } - fn rpar(&self) -> &Vec<#rightparen#generics> { + fn rpar(&self) -> &Vec<RightParen<'a>> { &self.rpar } - fn with_parens(self, left: #leftparen#generics, right: #rightparen#generics) -> Self { + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { let mut lpar = self.lpar; let mut rpar = self.rpar; lpar.insert(0, left); @@ -70,7 +43,7 @@ fn impl_struct(ast: &DeriveInput, deflated: bool) -> TokenStream { gen.into() } -fn impl_enum(ast: &DeriveInput, e: &DataEnum, deflated: bool) -> TokenStream { +fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream { let mut varnames = vec![]; for var in e.variants.iter() { match &var.fields { @@ -101,25 +74,20 @@ fn impl_enum(ast: &DeriveInput, e: &DataEnum, deflated: bool) -> TokenStream { } } let ident = &ast.ident; - let generics = if deflated { - parse_quote!(<'r, 'a>) - } else { - ast.generics.clone() - }; - let (treyt, leftparen, rightparen) = idents(deflated); + let generics = &ast.generics; let gen = quote!
{ - impl#generics #treyt#generics for #ident #generics { - fn lpar(&self) -> &Vec<#leftparen#generics> { + impl<'a> ParenthesizedNode<'a> for #ident #generics { + fn lpar(&self) -> &Vec<LeftParen<'a>> { match self { #(Self::#varnames(x) => x.lpar(),)* } } - fn rpar(&self) -> &Vec<#rightparen#generics> { + fn rpar(&self) -> &Vec<RightParen<'a>> { match self { #(Self::#varnames(x) => x.rpar(),)* } } - fn with_parens(self, left: #leftparen#generics, right: #rightparen#generics) -> Self { + fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self { match self { #(Self::#varnames(x) => Self::#varnames(x.with_parens(left, right)),)* } diff --git a/native/libcst_derive/tests/pass/minimal_cst.rs b/native/libcst_derive/tests/pass/minimal_cst.rs deleted file mode 100644 index 104b2e11..00000000 --- a/native/libcst_derive/tests/pass/minimal_cst.rs +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree - -use libcst_derive::{cst_node, Codegen}; - -pub enum Error {} - -type TokenRef<'r, 'a> = &'r &'a str; -pub type Result<T> = std::result::Result<T, Error>; - -pub struct Config<'a> { - #[allow(dead_code)] - foo: &'a str, -} -pub trait Inflate<'a> -where - Self: Sized, -{ - type Inflated; - fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated>; -} - -impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box<T> { - type Inflated = Box<T::Inflated>; - fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { - match (*self).inflate(config) { - Ok(a) => Ok(Box::new(a)), - Err(e) => Err(e), - } - } -} - -pub struct CodegenState<'a> { - #[allow(dead_code)] - foo: &'a str, -} -pub trait Codegen<'a> { - fn codegen(&self, state: &mut CodegenState<'a>); -} - -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct WS<'a> { - pub last_line: &'a str, -} - -#[cst_node] -pub struct Parameters<'a> { - pub params: Vec<Param<'a>>, - pub foo: Param<'a>, -} - -impl<'r, 'a> Inflate<'a> for DeflatedParameters<'r, 'a> { - type Inflated = Parameters<'a>; - fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> { - let params = vec![]; - #[allow(clippy::blacklisted_name)] - let foo = self.foo.inflate(config)?; - Ok(Self::Inflated { params, foo }) - } -} - -#[cst_node] -pub struct Param<'a> { - pub star: Option<&'a str>, - pub(crate) star_tok: Option<TokenRef<'a>>, -} - -impl<'r, 'a> Inflate<'a> for DeflatedParam<'r, 'a> { - type Inflated = Param<'a>; - fn inflate(self, _config: &Config<'a>) -> Result<Self::Inflated> { - Ok(Self::Inflated { star: self.star }) - } -} - -impl<'a> Codegen<'a> for Param<'a> { - fn codegen(&self, _state: &mut CodegenState<'a>) {} -} - -#[cst_node] -pub struct BitOr<'a> { - pub whitespace_before: WS<'a>, - pub whitespace_after: WS<'a>, - - pub(crate) tok: TokenRef<'a>, -} - -#[cst_node] -pub enum CompOp<'a> { - LessThan { - whitespace_before: WS<'a>, - tok: TokenRef<'a>, - }, - GreaterThan { - whitespace_after: WS<'a>, - tok: TokenRef<'a>, - }, -} - -impl<'r, 'a> Inflate<'a> for DeflatedCompOp<'r, 'a> { - type Inflated = CompOp<'a>; - fn inflate(self, _config: &Config<'a>) -> Result<Self::Inflated> { - Ok(match self { - Self::LessThan { tok: _, .. } => Self::Inflated::LessThan { - whitespace_before: WS { last_line: "yo" }, - }, - Self::GreaterThan { tok: _, ..
} => Self::Inflated::GreaterThan { - whitespace_after: WS { last_line: "" }, - }, - }) - } -} - -impl<'a> Codegen<'a> for CompOp<'a> { - fn codegen(&self, _state: &mut CodegenState<'a>) {} -} - -#[cst_node(Codegen)] -enum Expr<'a> { - #[allow(dead_code)] - One(Box>), - #[allow(dead_code)] - Two(CompOp<'a>), -} - -fn main() {} diff --git a/native/libcst_derive/tests/pass/simple.rs b/native/libcst_derive/tests/pass/simple.rs deleted file mode 100644 index 838f3914..00000000 --- a/native/libcst_derive/tests/pass/simple.rs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright (c) Meta Platforms, Inc. and affiliates. -// -// This source code is licensed under the MIT license found in the -// LICENSE file in the root directory of this source tree - -use libcst_derive::cst_node; - -#[derive(Debug, PartialEq, Eq, Clone)] -pub struct WS<'a>(&'a str); - -type TokenRef<'r, 'a> = &'r &'a str; - -#[cst_node] -pub enum Foo<'a> { - One(One<'a>), - Two(Box>), -} - -#[cst_node] -pub struct One<'a> { - pub two: Box>, - pub header: WS<'a>, - - pub(crate) newline_tok: TokenRef<'a>, -} - -#[cst_node] -pub struct Two<'a> { - pub whitespace_before: WS<'a>, - pub(crate) tok: TokenRef<'a>, -} - -#[cst_node] -struct Thin<'a> { - pub whitespace: WS<'a>, -} - -#[cst_node] -struct Value<'a> { - pub value: &'a str, -} - -#[cst_node] -struct Empty {} - -#[cst_node] -enum Smol<'a> { - #[allow(dead_code)] - Thin(Thin<'a>), - #[allow(dead_code)] - Empty(Empty), -} - -fn main() {} diff --git a/native/roundtrip.sh b/native/roundtrip.sh index c75241f7..3f732143 100755 --- a/native/roundtrip.sh +++ b/native/roundtrip.sh @@ -1,10 +1,5 @@ #!/bin/bash -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - PARSE=$(dirname $0)/target/release/parse exec diff -u "$1" <($PARSE < "$1") diff --git a/pyproject.toml b/pyproject.toml index f29b2474..1d33e75e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,131 +1,9 @@ -[build-system] -requires = ["setuptools", "setuptools-scm", "setuptools-rust", "wheel"] - -[project] -name = "libcst" -description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.14 programs." 
-readme = "README.rst" -dynamic = ["version"] -license = { file = "LICENSE" } -classifiers = [ - "License :: OSI Approved :: MIT License", - "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", - "Programming Language :: Python :: Free Threading", - "Typing :: Typed", -] -requires-python = ">=3.9" -dependencies = [ - "pyyaml>=5.2; python_version < '3.13'", - "pyyaml-ft>=8.0.0; python_version == '3.13'", - "pyyaml>=6.0.3; python_version >= '3.14'", - "typing-extensions; python_version < '3.10'", -] - -[project.urls] -Documentation = "https://libcst.readthedocs.io/en/latest/" -Github = "https://github.com/Instagram/LibCST" -Changelog = "https://github.com/Instagram/LibCST/blob/main/CHANGELOG.md" - -[dependency-groups] -dev = [ - "black==25.1.0", - "coverage[toml]>=4.5.4", - "build>=0.10.0", - "fixit==2.1.0", - "flake8==7.2.0", - "hypothesis>=4.36.0", - "hypothesmith>=0.0.4", - "maturin>=1.7.0,<1.8", - "poethepoet>=0.35.0", - "prompt-toolkit>=2.0.9", - "pyre-check==0.9.18; platform_system != 'Windows'", - "setuptools_scm>=6.0.1", - "ufmt==2.8.0", - "usort==1.0.8.post1", - "setuptools-rust>=1.5.2", - "slotscheck>=0.7.1", -] -docs = [ - {include-group = "dev"}, - "Sphinx>=5.1.1", - "sphinx-rtd-theme>=0.4.3", - "jupyter>=1.0.0", - "nbsphinx>=0.4.2", - "jinja2==3.1.6", -] - [tool.black] -target-version = ["py39"] -extend-exclude = '^/native/' # Prepend "^/" to specify root file/folder. See https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-format - -[tool.coverage.report] -fail_under = 93 -precision = 1 -show_missing = true -skip_covered = true -omit = ["*/_parser/*"] # temporary while I remove the parser - -[tool.uv] -cache-keys = [ - { file = "pyproject.toml" }, - { git = {commit = true, tags = true}}, - { file = "**/*.rs"}, - { file = "**/Cargo.toml"}, - { file = "**/Cargo.lock"}, -] - -[tool.poe.tasks] -fixtures = ["regenerate-fixtures", "_assert_no_changes"] -regenerate-fixtures = "python scripts/regenerate-fixtures.py" -_assert_no_changes = "git diff --exit-code" - -format = "ufmt format libcst scripts" -_flake8 = "flake8 libcst" -_ufmt = "ufmt check libcst scripts" -_slotscheck = "python -m slotscheck libcst" -_check_copyright = "python scripts/check_copyright.py" -lint = ["_flake8", "_ufmt", "_slotscheck", "_check_copyright"] -test = "python -m coverage run -m libcst.tests" -typecheck = "pyre check" -docs = "sphinx-build -ab html docs/source docs/build" - -[tool.slotscheck] -exclude-modules = '^libcst\.(testing|tests)' +target-version = ["py36"] +exclude = "native/.*" [tool.ufmt] excludes = ["native/", "stubs/"] -[tool.cibuildwheel] -build-verbosity = 1 -environment = { PATH = "$PATH:$HOME/.cargo/bin", LIBCST_NO_LOCAL_SCHEME="1" } -skip = [ - "pp*", - "*-win32", - "*-musllinux_i686", - "*-musllinux_ppc64le", - "*-musllinux_s390x", - "*-musllinux_armv7l", -] -enable = ["cpython-freethreading"] -test-command = [ - "python --version", - "python -m libcst.tool list", - # TODO: remove the gil once thread-safety issues are resolved - "python -X gil=1 -m libcst.tool codemod remove_unused_imports.RemoveUnusedImportsCommand {project}/libcst/_nodes", -] - -[tool.cibuildwheel.linux] -environment-pass = ["LIBCST_NO_LOCAL_SCHEME"] -before-all = "yum install -y libatomic; curl https://sh.rustup.rs -sSf | env 
-u CARGO_HOME sh -s -- --default-toolchain stable --profile minimal -y" - -[tool.cibuildwheel.macos] -before-all = "rustup target add aarch64-apple-darwin x86_64-apple-darwin" - -[tool.cibuildwheel.windows] -before-all = "rustup target add x86_64-pc-windows-msvc i686-pc-windows-msvc aarch64-pc-windows-msvc" +[build-system] +requires = ["setuptools", "wheel", "setuptools-rust"] \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 00000000..388c7556 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,17 @@ +black==21.10b0 +coverage>=4.5.4 +fixit==0.1.1 +flake8>=3.7.8 +git+https://github.com/jimmylai/sphinx.git@slots_type_annotation +hypothesis>=4.36.0 +hypothesmith>=0.0.4 +jupyter>=1.0.0 +maturin>=0.8.3,<0.9 +nbsphinx>=0.4.2 +prompt-toolkit>=2.0.9 +pyre-check==0.9.9; platform_system != "Windows" +setuptools_scm>=6.0.1 +sphinx-rtd-theme>=0.4.3 +ufmt==1.3 +usort==1.0.0rc1 +setuptools-rust>=0.12.1 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..cf8ff05c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +dataclasses>=0.6.0; python_version < '3.7' +typing_extensions>=3.7.4.2 +typing_inspect>=0.4.0 +pyyaml>=5.2 diff --git a/scripts/check_copyright.py b/scripts/check_copyright.py deleted file mode 100644 index 47d90ec6..00000000 --- a/scripts/check_copyright.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -import re -import sys -from pathlib import Path -from subprocess import run -from typing import Iterable, List, Pattern - -# Use the copyright header from this file as the benchmark for all files -EXPECTED_HEADER: str = "\n".join( - line for line in Path(__file__).read_text().splitlines()[:4] -) - -EXCEPTION_PATTERNS: List[Pattern[str]] = [ - re.compile(pattern) - for pattern in ( - r"^native/libcst/tests/fixtures/", - r"^libcst/_add_slots\.py$", - r"^libcst/tests/test_(e2e|fuzz)\.py$", - r"^libcst/_parser/base_parser\.py$", - r"^libcst/_parser/parso/utils\.py$", - r"^libcst/_parser/parso/pgen2/(generator|grammar_parser)\.py$", - r"^libcst/_parser/parso/python/(py_token|tokenize)\.py$", - r"^libcst/_parser/parso/tests/test_(fstring|tokenize|utils)\.py$", - ) -] - - -def tracked_files() -> Iterable[Path]: - proc = run( - ["git", "ls-tree", "-r", "--name-only", "HEAD"], - check=True, - capture_output=True, - encoding="utf-8", - ) - yield from ( - path - for line in proc.stdout.splitlines() - if not any(pattern.search(line) for pattern in EXCEPTION_PATTERNS) - if (path := Path(line)) and path.is_file() and path.suffix in (".py", ".sh") - ) - - -def main() -> None: - error = False - for path in tracked_files(): - content = path.read_text("utf-8") - if EXPECTED_HEADER not in content: - print(f"Missing or incomplete copyright in {path}") - error = True - sys.exit(1 if error else 0) - - -if __name__ == "__main__": - main() diff --git a/scripts/regenerate-fixtures.py b/scripts/regenerate-fixtures.py deleted file mode 100644 index 2b67b304..00000000 --- a/scripts/regenerate-fixtures.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under the MIT license found in the -# LICENSE file in the root directory of this source tree. - -""" -Regenerate test fixtures, eg. 
after upgrading Pyre -""" - -import json -import os -from pathlib import Path -from subprocess import run - -from libcst.metadata import TypeInferenceProvider - - -def main() -> None: - CWD = Path.cwd() - repo_root = Path(__file__).parent.parent - test_root = repo_root / "libcst" / "tests" / "pyre" - - try: - os.chdir(test_root) - run(["pyre", "-n", "start", "--no-watchman"], check=True) - - for file_path in test_root.glob("*.py"): - json_path = file_path.with_suffix(".json") - print(f"generating {file_path} -> {json_path}") - - path_str = file_path.as_posix() - cache = TypeInferenceProvider.gen_cache(test_root, [path_str], timeout=None) - result = cache[path_str] - json_path.write_text(json.dumps(result, sort_keys=True, indent=2)) - - finally: - run(["pyre", "-n", "stop"], check=True) - os.chdir(CWD) - - -if __name__ == "__main__": - main() diff --git a/setup.py b/setup.py index 12180cbc..0418cf26 100644 --- a/setup.py +++ b/setup.py @@ -3,18 +3,24 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from os import environ + +from os import environ, path import setuptools from setuptools_rust import Binding, RustExtension +# Grab the readme so that our package stays in sync with github. +this_directory: str = path.abspath(path.dirname(__file__)) +with open(path.join(this_directory, "README.rst"), encoding="utf-8") as f: + long_description: str = f.read() + + def no_local_scheme(version: str) -> str: return "" setuptools.setup( - setup_requires=["setuptools-rust", "setuptools_scm"], use_scm_version={ "write_to": "libcst/_version.py", **( @@ -23,6 +29,12 @@ setuptools.setup( else {} ), }, + name="libcst", + description="A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7 and 3.8 programs.", + long_description=long_description, + long_description_content_type="text/x-rst", + url="https://github.com/Instagram/LibCST", + license="MIT", packages=setuptools.find_packages(), package_data={ "libcst": ["py.typed"], @@ -30,6 +42,16 @@ setuptools.setup( "libcst.codemod.tests": ["*"], }, test_suite="libcst", + python_requires=">=3.6", + setup_requires=["setuptools_scm"], + install_requires=[dep.strip() for dep in open("requirements.txt").readlines()], + extras_require={ + "dev": [ + dep.strip() + for dep in open("requirements-dev.txt").readlines() + if "=" in dep + ], + }, rust_extensions=[ RustExtension( "libcst.native", @@ -37,5 +59,12 @@ setuptools.setup( binding=Binding.PyO3, ) ], + classifiers=[ + "License :: OSI Approved :: MIT License", + "Topic :: Software Development :: Libraries", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + ], zip_safe=False, # for mypy compatibility https://mypy.readthedocs.io/en/latest/installed_packages.html ) diff --git a/stubs/hypothesis.pyi b/stubs/hypothesis.pyi index a8c27975..0568b4d1 100644 --- a/stubs/hypothesis.pyi +++ b/stubs/hypothesis.pyi @@ -1,5 +1 @@ -# pyre-unsafe - -from typing import Any - -def __getattr__(name: str) -> Any: ... +# pyre-placeholder-stub diff --git a/stubs/hypothesmith.pyi b/stubs/hypothesmith.pyi index a8c27975..0568b4d1 100644 --- a/stubs/hypothesmith.pyi +++ b/stubs/hypothesmith.pyi @@ -1,5 +1 @@ -# pyre-unsafe - -from typing import Any - -def __getattr__(name: str) -> Any: ... 
+# pyre-placeholder-stub diff --git a/stubs/libcst/native.pyi b/stubs/libcst/native.pyi index 2a84d6a7..4741266b 100644 --- a/stubs/libcst/native.pyi +++ b/stubs/libcst/native.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/parser_config.pyi b/stubs/libcst_native/parser_config.pyi index 1a095cfc..412da05e 100644 --- a/stubs/libcst_native/parser_config.pyi +++ b/stubs/libcst_native/parser_config.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/token_type.pyi b/stubs/libcst_native/token_type.pyi index b51e8b48..4dacfad7 100644 --- a/stubs/libcst_native/token_type.pyi +++ b/stubs/libcst_native/token_type.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/tokenize.pyi b/stubs/libcst_native/tokenize.pyi index 12270da5..91c6d2e9 100644 --- a/stubs/libcst_native/tokenize.pyi +++ b/stubs/libcst_native/tokenize.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_parser.pyi b/stubs/libcst_native/whitespace_parser.pyi index 7c60189b..b2b51b54 100644 --- a/stubs/libcst_native/whitespace_parser.pyi +++ b/stubs/libcst_native/whitespace_parser.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/libcst_native/whitespace_state.pyi b/stubs/libcst_native/whitespace_state.pyi index 75264a14..82db9527 100644 --- a/stubs/libcst_native/whitespace_state.pyi +++ b/stubs/libcst_native/whitespace_state.pyi @@ -1,4 +1,4 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. +# Copyright (c) Meta Platforms, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. diff --git a/stubs/setuptools.pyi b/stubs/setuptools.pyi index a8c27975..0568b4d1 100644 --- a/stubs/setuptools.pyi +++ b/stubs/setuptools.pyi @@ -1,5 +1 @@ -# pyre-unsafe - -from typing import Any - -def __getattr__(name: str) -> Any: ... +# pyre-placeholder-stub diff --git a/stubs/typing_inspect.pyi b/stubs/typing_inspect.pyi index a8c27975..0568b4d1 100644 --- a/stubs/typing_inspect.pyi +++ b/stubs/typing_inspect.pyi @@ -1,5 +1 @@ -# pyre-unsafe - -from typing import Any - -def __getattr__(name: str) -> Any: ... 
+# pyre-placeholder-stub diff --git a/uv.lock b/uv.lock deleted file mode 100644 index f271e1ea..00000000 --- a/uv.lock +++ /dev/null @@ -1,2866 +0,0 @@ -version = 1 -revision = 2 -requires-python = ">=3.9" -resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", -] - -[[package]] -name = "alabaster" -version = "0.7.16" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, -] - -[[package]] -name = "alabaster" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] -sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, -] - -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, -] - -[[package]] -name = "appnope" -version = "0.1.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, -] - -[[package]] -name = "argon2-cffi" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "argon2-cffi-bindings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706, upload-time = "2025-06-03T06:55:32.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657, upload-time = "2025-06-03T06:55:30.804Z" }, -] - -[[package]] -name = "argon2-cffi-bindings" -version = "21.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/e9/184b8ccce6683b0aa2fbb7ba5683ea4b9c5763f1356347f1312c32e3c66e/argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3", size = 1779911, upload-time = "2021-12-01T08:52:55.68Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/13/838ce2620025e9666aa8f686431f67a29052241692a3dd1ae9d3692a89d3/argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367", size = 29658, upload-time = "2021-12-01T09:09:17.016Z" }, - { url = "https://files.pythonhosted.org/packages/b3/02/f7f7bb6b6af6031edb11037639c697b912e1dea2db94d436e681aea2f495/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d", size = 80583, upload-time = "2021-12-01T09:09:19.546Z" }, - { url = "https://files.pythonhosted.org/packages/ec/f7/378254e6dd7ae6f31fe40c8649eea7d4832a42243acaf0f1fff9083b2bed/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae", size = 86168, upload-time = "2021-12-01T09:09:21.445Z" }, - { url = "https://files.pythonhosted.org/packages/74/f6/4a34a37a98311ed73bb80efe422fed95f2ac25a4cacc5ae1d7ae6a144505/argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c", size = 82709, upload-time = "2021-12-01T09:09:18.182Z" }, - { url = "https://files.pythonhosted.org/packages/74/2b/73d767bfdaab25484f7e7901379d5f8793cccbb86c6e0cbc4c1b96f63896/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86", size = 83613, upload-time = "2021-12-01T09:09:22.741Z" }, - { url = "https://files.pythonhosted.org/packages/4f/fd/37f86deef67ff57c76f137a67181949c2d408077e2e3dd70c6c42912c9bf/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f", size = 84583, upload-time = 
"2021-12-01T09:09:24.177Z" }, - { url = "https://files.pythonhosted.org/packages/6f/52/5a60085a3dae8fded8327a4f564223029f5f54b0cb0455a31131b5363a01/argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e", size = 88475, upload-time = "2021-12-01T09:09:26.673Z" }, - { url = "https://files.pythonhosted.org/packages/8b/95/143cd64feb24a15fa4b189a3e1e7efbaeeb00f39a51e99b26fc62fbacabd/argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082", size = 27698, upload-time = "2021-12-01T09:09:27.87Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/e34e47c7dee97ba6f01a6203e0383e15b60fb85d78ac9a15cd066f6fe28b/argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f", size = 30817, upload-time = "2021-12-01T09:09:30.267Z" }, - { url = "https://files.pythonhosted.org/packages/5a/e4/bf8034d25edaa495da3c8a3405627d2e35758e44ff6eaa7948092646fdcc/argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93", size = 53104, upload-time = "2021-12-01T09:09:31.335Z" }, -] - -[[package]] -name = "arrow" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "python-dateutil" }, - { name = "types-python-dateutil" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960, upload-time = "2023-09-30T22:11:18.25Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419, upload-time = "2023-09-30T22:11:16.072Z" }, -] - -[[package]] -name = "asttokens" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" }, -] - -[[package]] -name = "async-lru" -version = "2.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = 
"2025-03-16T17:25:35.422Z" }, -] - -[[package]] -name = "attrs" -version = "25.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, -] - -[[package]] -name = "babel" -version = "2.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, -] - -[[package]] -name = "beautifulsoup4" -version = "4.13.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "soupsieve" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d8/e4/0c4c39e18fd76d6a628d4dd8da40543d136ce2d1752bd6eeeab0791f4d6b/beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195", size = 621067, upload-time = "2025-04-15T17:05:13.836Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, -] - -[[package]] -name = "black" -version = "25.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/94/49/26a7b0f3f35da4b5a65f081943b7bcd22d7002f5f0fb8098ec1ff21cb6ef/black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666", size = 649449, upload-time = "2025-01-29T04:15:40.373Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/3b/4ba3f93ac8d90410423fdd31d7541ada9bcee1df32fb90d26de41ed40e1d/black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32", size = 1629419, upload-time = "2025-01-29T05:37:06.642Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/02/0bde0485146a8a5e694daed47561785e8b77a0466ccc1f3e485d5ef2925e/black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da", size = 1461080, upload-time = "2025-01-29T05:37:09.321Z" }, - { url = "https://files.pythonhosted.org/packages/52/0e/abdf75183c830eaca7589144ff96d49bce73d7ec6ad12ef62185cc0f79a2/black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7", size = 1766886, upload-time = "2025-01-29T04:18:24.432Z" }, - { url = "https://files.pythonhosted.org/packages/dc/a6/97d8bb65b1d8a41f8a6736222ba0a334db7b7b77b8023ab4568288f23973/black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9", size = 1419404, upload-time = "2025-01-29T04:19:04.296Z" }, - { url = "https://files.pythonhosted.org/packages/7e/4f/87f596aca05c3ce5b94b8663dbfe242a12843caaa82dd3f85f1ffdc3f177/black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0", size = 1614372, upload-time = "2025-01-29T05:37:11.71Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d0/2c34c36190b741c59c901e56ab7f6e54dad8df05a6272a9747ecef7c6036/black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299", size = 1442865, upload-time = "2025-01-29T05:37:14.309Z" }, - { url = "https://files.pythonhosted.org/packages/21/d4/7518c72262468430ead45cf22bd86c883a6448b9eb43672765d69a8f1248/black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096", size = 1749699, upload-time = "2025-01-29T04:18:17.688Z" }, - { url = "https://files.pythonhosted.org/packages/58/db/4f5beb989b547f79096e035c4981ceb36ac2b552d0ac5f2620e941501c99/black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2", size = 1428028, upload-time = "2025-01-29T04:18:51.711Z" }, - { url = "https://files.pythonhosted.org/packages/83/71/3fe4741df7adf015ad8dfa082dd36c94ca86bb21f25608eb247b4afb15b2/black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b", size = 1650988, upload-time = "2025-01-29T05:37:16.707Z" }, - { url = "https://files.pythonhosted.org/packages/13/f3/89aac8a83d73937ccd39bbe8fc6ac8860c11cfa0af5b1c96d081facac844/black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc", size = 1453985, upload-time = "2025-01-29T05:37:18.273Z" }, - { url = "https://files.pythonhosted.org/packages/6f/22/b99efca33f1f3a1d2552c714b1e1b5ae92efac6c43e790ad539a163d1754/black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f", size = 1783816, upload-time = "2025-01-29T04:18:33.823Z" }, - { url = "https://files.pythonhosted.org/packages/18/7e/a27c3ad3822b6f2e0e00d63d58ff6299a99a5b3aee69fa77cd4b0076b261/black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba", size = 1440860, upload-time = "2025-01-29T04:19:12.944Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/87/0edf98916640efa5d0696e1abb0a8357b52e69e82322628f25bf14d263d1/black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f", size = 1650673, upload-time = "2025-01-29T05:37:20.574Z" }, - { url = "https://files.pythonhosted.org/packages/52/e5/f7bf17207cf87fa6e9b676576749c6b6ed0d70f179a3d812c997870291c3/black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3", size = 1453190, upload-time = "2025-01-29T05:37:22.106Z" }, - { url = "https://files.pythonhosted.org/packages/e3/ee/adda3d46d4a9120772fae6de454c8495603c37c4c3b9c60f25b1ab6401fe/black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171", size = 1782926, upload-time = "2025-01-29T04:18:58.564Z" }, - { url = "https://files.pythonhosted.org/packages/cc/64/94eb5f45dcb997d2082f097a3944cfc7fe87e071907f677e80788a2d7b7a/black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18", size = 1442613, upload-time = "2025-01-29T04:19:27.63Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b6/ae7507470a4830dbbfe875c701e84a4a5fb9183d1497834871a715716a92/black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0", size = 1628593, upload-time = "2025-01-29T05:37:23.672Z" }, - { url = "https://files.pythonhosted.org/packages/24/c1/ae36fa59a59f9363017ed397750a0cd79a470490860bc7713967d89cdd31/black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f", size = 1460000, upload-time = "2025-01-29T05:37:25.829Z" }, - { url = "https://files.pythonhosted.org/packages/ac/b6/98f832e7a6c49aa3a464760c67c7856363aa644f2f3c74cf7d624168607e/black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e", size = 1765963, upload-time = "2025-01-29T04:18:38.116Z" }, - { url = "https://files.pythonhosted.org/packages/ce/e9/2cb0a017eb7024f70e0d2e9bdb8c5a5b078c5740c7f8816065d06f04c557/black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355", size = 1419419, upload-time = "2025-01-29T04:18:30.191Z" }, - { url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" }, -] - -[[package]] -name = "bleach" -version = "6.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = 
"2024-10-29T18:30:38.186Z" }, -] - -[package.optional-dependencies] -css = [ - { name = "tinycss2" }, -] - -[[package]] -name = "build" -version = "1.2.2.post1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "os_name == 'nt'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10.2'" }, - { name = "packaging" }, - { name = "pyproject-hooks" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7", size = 46701, upload-time = "2024-10-06T17:22:25.251Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5", size = 22950, upload-time = "2024-10-06T17:22:23.299Z" }, -] - -[[package]] -name = "certifi" -version = "2025.4.26" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, -] - -[[package]] -name = "cffi" -version = "1.17.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191, upload-time = "2024-09-04T20:43:30.027Z" }, - { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592, upload-time = "2024-09-04T20:43:32.108Z" }, - { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024, upload-time = "2024-09-04T20:43:34.186Z" }, - { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188, upload-time = "2024-09-04T20:43:36.286Z" 
}, - { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571, upload-time = "2024-09-04T20:43:38.586Z" }, - { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687, upload-time = "2024-09-04T20:43:40.084Z" }, - { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211, upload-time = "2024-09-04T20:43:41.526Z" }, - { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325, upload-time = "2024-09-04T20:43:43.117Z" }, - { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784, upload-time = "2024-09-04T20:43:45.256Z" }, - { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564, upload-time = "2024-09-04T20:43:46.779Z" }, - { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804, upload-time = "2024-09-04T20:43:48.186Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299, upload-time = "2024-09-04T20:43:49.812Z" }, - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, - { url = 
"https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, - { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220, upload-time = "2024-09-04T20:45:01.577Z" }, - { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605, upload-time = "2024-09-04T20:45:03.837Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910, upload-time = "2024-09-04T20:45:05.315Z" }, - { url = "https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200, upload-time = "2024-09-04T20:45:06.903Z" }, - { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565, upload-time = "2024-09-04T20:45:08.975Z" }, - { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635, upload-time = "2024-09-04T20:45:10.64Z" }, - { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218, upload-time = "2024-09-04T20:45:12.366Z" }, - { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486, upload-time = "2024-09-04T20:45:13.935Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911, upload-time = "2024-09-04T20:45:15.696Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632, upload-time = "2024-09-04T20:45:17.284Z" }, - { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820, upload-time = "2024-09-04T20:45:18.762Z" }, - { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290, upload-time = "2024-09-04T20:45:20.226Z" }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 
126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = 
"https://files.pythonhosted.org/packages/28/f8/dfb01ff6cc9af38552c69c9027501ff5a5117c4cc18dcd27cb5259fa1888/charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4", size = 201671, upload-time = "2025-05-02T08:34:12.696Z" }, - { url = "https://files.pythonhosted.org/packages/32/fb/74e26ee556a9dbfe3bd264289b67be1e6d616329403036f6507bb9f3f29c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7", size = 144744, upload-time = "2025-05-02T08:34:14.665Z" }, - { url = "https://files.pythonhosted.org/packages/ad/06/8499ee5aa7addc6f6d72e068691826ff093329fe59891e83b092ae4c851c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836", size = 154993, upload-time = "2025-05-02T08:34:17.134Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a2/5e4c187680728219254ef107a6949c60ee0e9a916a5dadb148c7ae82459c/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597", size = 147382, upload-time = "2025-05-02T08:34:19.081Z" }, - { url = "https://files.pythonhosted.org/packages/4c/fe/56aca740dda674f0cc1ba1418c4d84534be51f639b5f98f538b332dc9a95/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7", size = 149536, upload-time = "2025-05-02T08:34:21.073Z" }, - { url = "https://files.pythonhosted.org/packages/53/13/db2e7779f892386b589173dd689c1b1e304621c5792046edd8a978cbf9e0/charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f", size = 151349, upload-time = "2025-05-02T08:34:23.193Z" }, - { url = "https://files.pythonhosted.org/packages/69/35/e52ab9a276186f729bce7a0638585d2982f50402046e4b0faa5d2c3ef2da/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba", size = 146365, upload-time = "2025-05-02T08:34:25.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d8/af7333f732fc2e7635867d56cb7c349c28c7094910c72267586947561b4b/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12", size = 154499, upload-time = "2025-05-02T08:34:27.359Z" }, - { url = "https://files.pythonhosted.org/packages/7a/3d/a5b2e48acef264d71e036ff30bcc49e51bde80219bb628ba3e00cf59baac/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518", size = 157735, upload-time = "2025-05-02T08:34:29.798Z" }, - { url = "https://files.pythonhosted.org/packages/85/d8/23e2c112532a29f3eef374375a8684a4f3b8e784f62b01da931186f43494/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5", size = 154786, upload-time = "2025-05-02T08:34:31.858Z" }, - { url = "https://files.pythonhosted.org/packages/c7/57/93e0169f08ecc20fe82d12254a200dfaceddc1c12a4077bf454ecc597e33/charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3", size = 150203, upload-time = "2025-05-02T08:34:33.88Z" }, - { url = "https://files.pythonhosted.org/packages/2c/9d/9bf2b005138e7e060d7ebdec7503d0ef3240141587651f4b445bdf7286c2/charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471", size = 98436, upload-time = "2025-05-02T08:34:35.907Z" }, - { url = "https://files.pythonhosted.org/packages/6d/24/5849d46cf4311bbf21b424c443b09b459f5b436b1558c04e45dbb7cc478b/charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e", size = 105772, upload-time = "2025-05-02T08:34:37.935Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, -] - -[[package]] -name = "click" -version = "8.2.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, -] - -[[package]] -name = "comm" -version = "0.2.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/a8/fb783cb0abe2b5fded9f55e5703015cdf1c9c85b3669087c538dd15a6a86/comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e", size = 6210, upload-time = "2024-03-12T16:53:41.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/75/49e5bfe642f71f272236b5b2d2691cf915a7283cc0ceda56357b61daa538/comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3", size = 7180, upload-time = "2024-03-12T16:53:39.226Z" }, -] - -[[package]] -name = "coverage" -version = "7.8.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/07/998afa4a0ecdf9b1981ae05415dad2d4e7716e1b1f00abbd91691ac09ac9/coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27", size = 812759, upload-time = "2025-05-23T11:39:57.856Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/6b/7dd06399a5c0b81007e3a6af0395cd60e6a30f959f8d407d3ee04642e896/coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a", size = 211573, upload-time = "2025-05-23T11:37:47.207Z" }, - { url = "https://files.pythonhosted.org/packages/f0/df/2b24090820a0bac1412955fb1a4dade6bc3b8dcef7b899c277ffaf16916d/coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be", size = 212006, upload-time = "2025-05-23T11:37:50.289Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c4/e4e3b998e116625562a872a342419652fa6ca73f464d9faf9f52f1aff427/coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3", size = 241128, upload-time = "2025-05-23T11:37:52.229Z" }, - { url = "https://files.pythonhosted.org/packages/b1/67/b28904afea3e87a895da850ba587439a61699bf4b73d04d0dfd99bbd33b4/coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6", size = 239026, upload-time = "2025-05-23T11:37:53.846Z" }, - { url = "https://files.pythonhosted.org/packages/8c/0f/47bf7c5630d81bc2cd52b9e13043685dbb7c79372a7f5857279cc442b37c/coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622", size = 240172, upload-time = "2025-05-23T11:37:55.711Z" }, - { url = "https://files.pythonhosted.org/packages/ba/38/af3eb9d36d85abc881f5aaecf8209383dbe0fa4cac2d804c55d05c51cb04/coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c", size = 240086, upload-time = "2025-05-23T11:37:57.724Z" }, - { url = "https://files.pythonhosted.org/packages/9e/64/c40c27c2573adeba0fe16faf39a8aa57368a1f2148865d6bb24c67eadb41/coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3", size = 238792, 
upload-time = "2025-05-23T11:37:59.737Z" }, - { url = "https://files.pythonhosted.org/packages/8e/ab/b7c85146f15457671c1412afca7c25a5696d7625e7158002aa017e2d7e3c/coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404", size = 239096, upload-time = "2025-05-23T11:38:01.693Z" }, - { url = "https://files.pythonhosted.org/packages/d3/50/9446dad1310905fb1dc284d60d4320a5b25d4e3e33f9ea08b8d36e244e23/coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7", size = 214144, upload-time = "2025-05-23T11:38:03.68Z" }, - { url = "https://files.pythonhosted.org/packages/23/ed/792e66ad7b8b0df757db8d47af0c23659cdb5a65ef7ace8b111cacdbee89/coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347", size = 215043, upload-time = "2025-05-23T11:38:05.217Z" }, - { url = "https://files.pythonhosted.org/packages/6a/4d/1ff618ee9f134d0de5cc1661582c21a65e06823f41caf801aadf18811a8e/coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9", size = 211692, upload-time = "2025-05-23T11:38:08.485Z" }, - { url = "https://files.pythonhosted.org/packages/96/fa/c3c1b476de96f2bc7a8ca01a9f1fcb51c01c6b60a9d2c3e66194b2bdb4af/coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879", size = 212115, upload-time = "2025-05-23T11:38:09.989Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c2/5414c5a1b286c0f3881ae5adb49be1854ac5b7e99011501f81c8c1453065/coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a", size = 244740, upload-time = "2025-05-23T11:38:11.947Z" }, - { url = "https://files.pythonhosted.org/packages/cd/46/1ae01912dfb06a642ef3dd9cf38ed4996fda8fe884dab8952da616f81a2b/coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5", size = 242429, upload-time = "2025-05-23T11:38:13.955Z" }, - { url = "https://files.pythonhosted.org/packages/06/58/38c676aec594bfe2a87c7683942e5a30224791d8df99bcc8439fde140377/coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11", size = 244218, upload-time = "2025-05-23T11:38:15.631Z" }, - { url = "https://files.pythonhosted.org/packages/80/0c/95b1023e881ce45006d9abc250f76c6cdab7134a1c182d9713878dfefcb2/coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a", size = 243865, upload-time = "2025-05-23T11:38:17.622Z" }, - { url = "https://files.pythonhosted.org/packages/57/37/0ae95989285a39e0839c959fe854a3ae46c06610439350d1ab860bf020ac/coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb", size = 242038, upload-time = "2025-05-23T11:38:19.966Z" }, - { url = "https://files.pythonhosted.org/packages/4d/82/40e55f7c0eb5e97cc62cbd9d0746fd24e8caf57be5a408b87529416e0c70/coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54", size = 242567, 
upload-time = "2025-05-23T11:38:21.912Z" }, - { url = "https://files.pythonhosted.org/packages/f9/35/66a51adc273433a253989f0d9cc7aa6bcdb4855382cf0858200afe578861/coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a", size = 214194, upload-time = "2025-05-23T11:38:23.571Z" }, - { url = "https://files.pythonhosted.org/packages/f6/8f/a543121f9f5f150eae092b08428cb4e6b6d2d134152c3357b77659d2a605/coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975", size = 215109, upload-time = "2025-05-23T11:38:25.137Z" }, - { url = "https://files.pythonhosted.org/packages/77/65/6cc84b68d4f35186463cd7ab1da1169e9abb59870c0f6a57ea6aba95f861/coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53", size = 213521, upload-time = "2025-05-23T11:38:27.123Z" }, - { url = "https://files.pythonhosted.org/packages/8d/2a/1da1ada2e3044fcd4a3254fb3576e160b8fe5b36d705c8a31f793423f763/coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c", size = 211876, upload-time = "2025-05-23T11:38:29.01Z" }, - { url = "https://files.pythonhosted.org/packages/70/e9/3d715ffd5b6b17a8be80cd14a8917a002530a99943cc1939ad5bb2aa74b9/coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1", size = 212130, upload-time = "2025-05-23T11:38:30.675Z" }, - { url = "https://files.pythonhosted.org/packages/a0/02/fdce62bb3c21649abfd91fbdcf041fb99be0d728ff00f3f9d54d97ed683e/coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279", size = 246176, upload-time = "2025-05-23T11:38:32.395Z" }, - { url = "https://files.pythonhosted.org/packages/a7/52/decbbed61e03b6ffe85cd0fea360a5e04a5a98a7423f292aae62423b8557/coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99", size = 243068, upload-time = "2025-05-23T11:38:33.989Z" }, - { url = "https://files.pythonhosted.org/packages/38/6c/d0e9c0cce18faef79a52778219a3c6ee8e336437da8eddd4ab3dbd8fadff/coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20", size = 245328, upload-time = "2025-05-23T11:38:35.568Z" }, - { url = "https://files.pythonhosted.org/packages/f0/70/f703b553a2f6b6c70568c7e398ed0789d47f953d67fbba36a327714a7bca/coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2", size = 245099, upload-time = "2025-05-23T11:38:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fb/4cbb370dedae78460c3aacbdad9d249e853f3bc4ce5ff0e02b1983d03044/coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57", size = 243314, upload-time = "2025-05-23T11:38:39.238Z" }, - { url = "https://files.pythonhosted.org/packages/39/9f/1afbb2cb9c8699b8bc38afdce00a3b4644904e6a38c7bf9005386c9305ec/coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f", size = 244489, upload-time = 
"2025-05-23T11:38:40.845Z" }, - { url = "https://files.pythonhosted.org/packages/79/fa/f3e7ec7d220bff14aba7a4786ae47043770cbdceeea1803083059c878837/coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8", size = 214366, upload-time = "2025-05-23T11:38:43.551Z" }, - { url = "https://files.pythonhosted.org/packages/54/aa/9cbeade19b7e8e853e7ffc261df885d66bf3a782c71cba06c17df271f9e6/coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223", size = 215165, upload-time = "2025-05-23T11:38:45.148Z" }, - { url = "https://files.pythonhosted.org/packages/c4/73/e2528bf1237d2448f882bbebaec5c3500ef07301816c5c63464b9da4d88a/coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f", size = 213548, upload-time = "2025-05-23T11:38:46.74Z" }, - { url = "https://files.pythonhosted.org/packages/1a/93/eb6400a745ad3b265bac36e8077fdffcf0268bdbbb6c02b7220b624c9b31/coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca", size = 211898, upload-time = "2025-05-23T11:38:49.066Z" }, - { url = "https://files.pythonhosted.org/packages/1b/7c/bdbf113f92683024406a1cd226a199e4200a2001fc85d6a6e7e299e60253/coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d", size = 212171, upload-time = "2025-05-23T11:38:51.207Z" }, - { url = "https://files.pythonhosted.org/packages/91/22/594513f9541a6b88eb0dba4d5da7d71596dadef6b17a12dc2c0e859818a9/coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85", size = 245564, upload-time = "2025-05-23T11:38:52.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f4/2860fd6abeebd9f2efcfe0fd376226938f22afc80c1943f363cd3c28421f/coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257", size = 242719, upload-time = "2025-05-23T11:38:54.529Z" }, - { url = "https://files.pythonhosted.org/packages/89/60/f5f50f61b6332451520e6cdc2401700c48310c64bc2dd34027a47d6ab4ca/coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108", size = 244634, upload-time = "2025-05-23T11:38:57.326Z" }, - { url = "https://files.pythonhosted.org/packages/3b/70/7f4e919039ab7d944276c446b603eea84da29ebcf20984fb1fdf6e602028/coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0", size = 244824, upload-time = "2025-05-23T11:38:59.421Z" }, - { url = "https://files.pythonhosted.org/packages/26/45/36297a4c0cea4de2b2c442fe32f60c3991056c59cdc3cdd5346fbb995c97/coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050", size = 242872, upload-time = "2025-05-23T11:39:01.049Z" }, - { url = "https://files.pythonhosted.org/packages/a4/71/e041f1b9420f7b786b1367fa2a375703889ef376e0d48de9f5723fb35f11/coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48", size = 244179, upload-time = 
"2025-05-23T11:39:02.709Z" }, - { url = "https://files.pythonhosted.org/packages/bd/db/3c2bf49bdc9de76acf2491fc03130c4ffc51469ce2f6889d2640eb563d77/coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7", size = 214393, upload-time = "2025-05-23T11:39:05.457Z" }, - { url = "https://files.pythonhosted.org/packages/c6/dc/947e75d47ebbb4b02d8babb1fad4ad381410d5bc9da7cfca80b7565ef401/coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3", size = 215194, upload-time = "2025-05-23T11:39:07.171Z" }, - { url = "https://files.pythonhosted.org/packages/90/31/a980f7df8a37eaf0dc60f932507fda9656b3a03f0abf188474a0ea188d6d/coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7", size = 213580, upload-time = "2025-05-23T11:39:08.862Z" }, - { url = "https://files.pythonhosted.org/packages/8a/6a/25a37dd90f6c95f59355629417ebcb74e1c34e38bb1eddf6ca9b38b0fc53/coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008", size = 212734, upload-time = "2025-05-23T11:39:11.109Z" }, - { url = "https://files.pythonhosted.org/packages/36/8b/3a728b3118988725f40950931abb09cd7f43b3c740f4640a59f1db60e372/coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36", size = 212959, upload-time = "2025-05-23T11:39:12.751Z" }, - { url = "https://files.pythonhosted.org/packages/53/3c/212d94e6add3a3c3f412d664aee452045ca17a066def8b9421673e9482c4/coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46", size = 257024, upload-time = "2025-05-23T11:39:15.569Z" }, - { url = "https://files.pythonhosted.org/packages/a4/40/afc03f0883b1e51bbe804707aae62e29c4e8c8bbc365c75e3e4ddeee9ead/coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be", size = 252867, upload-time = "2025-05-23T11:39:17.64Z" }, - { url = "https://files.pythonhosted.org/packages/18/a2/3699190e927b9439c6ded4998941a3c1d6fa99e14cb28d8536729537e307/coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740", size = 255096, upload-time = "2025-05-23T11:39:19.328Z" }, - { url = "https://files.pythonhosted.org/packages/b4/06/16e3598b9466456b718eb3e789457d1a5b8bfb22e23b6e8bbc307df5daf0/coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625", size = 256276, upload-time = "2025-05-23T11:39:21.077Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d5/4b5a120d5d0223050a53d2783c049c311eea1709fa9de12d1c358e18b707/coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b", size = 254478, upload-time = "2025-05-23T11:39:22.838Z" }, - { url = "https://files.pythonhosted.org/packages/ba/85/f9ecdb910ecdb282b121bfcaa32fa8ee8cbd7699f83330ee13ff9bbf1a85/coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199", size = 255255, upload-time = 
"2025-05-23T11:39:24.644Z" }, - { url = "https://files.pythonhosted.org/packages/50/63/2d624ac7d7ccd4ebbd3c6a9eba9d7fc4491a1226071360d59dd84928ccb2/coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8", size = 215109, upload-time = "2025-05-23T11:39:26.722Z" }, - { url = "https://files.pythonhosted.org/packages/22/5e/7053b71462e970e869111c1853afd642212568a350eba796deefdfbd0770/coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d", size = 216268, upload-time = "2025-05-23T11:39:28.429Z" }, - { url = "https://files.pythonhosted.org/packages/07/69/afa41aa34147655543dbe96994f8a246daf94b361ccf5edfd5df62ce066a/coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b", size = 214071, upload-time = "2025-05-23T11:39:30.55Z" }, - { url = "https://files.pythonhosted.org/packages/71/1e/388267ad9c6aa126438acc1ceafede3bb746afa9872e3ec5f0691b7d5efa/coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a", size = 211566, upload-time = "2025-05-23T11:39:32.333Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a5/acc03e5cf0bba6357f5e7c676343de40fbf431bb1e115fbebf24b2f7f65e/coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d", size = 211996, upload-time = "2025-05-23T11:39:34.512Z" }, - { url = "https://files.pythonhosted.org/packages/5b/a2/0fc0a9f6b7c24fa4f1d7210d782c38cb0d5e692666c36eaeae9a441b6755/coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca", size = 240741, upload-time = "2025-05-23T11:39:36.252Z" }, - { url = "https://files.pythonhosted.org/packages/e6/da/1c6ba2cf259710eed8916d4fd201dccc6be7380ad2b3b9f63ece3285d809/coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d", size = 238672, upload-time = "2025-05-23T11:39:38.03Z" }, - { url = "https://files.pythonhosted.org/packages/ac/51/c8fae0dc3ca421e6e2509503696f910ff333258db672800c3bdef256265a/coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787", size = 239769, upload-time = "2025-05-23T11:39:40.24Z" }, - { url = "https://files.pythonhosted.org/packages/59/8e/b97042ae92c59f40be0c989df090027377ba53f2d6cef73c9ca7685c26a6/coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7", size = 239555, upload-time = "2025-05-23T11:39:42.3Z" }, - { url = "https://files.pythonhosted.org/packages/47/35/b8893e682d6e96b1db2af5997fc13ef62219426fb17259d6844c693c5e00/coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3", size = 237768, upload-time = "2025-05-23T11:39:44.069Z" }, - { url = "https://files.pythonhosted.org/packages/03/6c/023b0b9a764cb52d6243a4591dcb53c4caf4d7340445113a1f452bb80591/coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7", size = 238757, upload-time = "2025-05-23T11:39:46.195Z" }, - { 
url = "https://files.pythonhosted.org/packages/03/ed/3af7e4d721bd61a8df7de6de9e8a4271e67f3d9e086454558fd9f48eb4f6/coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a", size = 214166, upload-time = "2025-05-23T11:39:47.934Z" }, - { url = "https://files.pythonhosted.org/packages/9d/30/ee774b626773750dc6128354884652507df3c59d6aa8431526107e595227/coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e", size = 215050, upload-time = "2025-05-23T11:39:50.252Z" }, - { url = "https://files.pythonhosted.org/packages/69/2f/572b29496d8234e4a7773200dd835a0d32d9e171f2d974f3fe04a9dbc271/coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837", size = 203636, upload-time = "2025-05-23T11:39:52.002Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1a/0b9c32220ad694d66062f571cc5cedfa9997b64a591e8a500bb63de1bd40/coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32", size = 203623, upload-time = "2025-05-23T11:39:53.846Z" }, -] - -[package.optional-dependencies] -toml = [ - { name = "tomli", marker = "python_full_version <= '3.11'" }, -] - -[[package]] -name = "dataclasses-json" -version = "0.6.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "marshmallow" }, - { name = "typing-inspect" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, -] - -[[package]] -name = "debugpy" -version = "1.8.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/75/087fe07d40f490a78782ff3b0a30e3968936854105487decdb33446d4b0e/debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322", size = 1641444, upload-time = "2025-04-10T19:46:10.981Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/df/156df75a41aaebd97cee9d3870fe68f8001b6c1c4ca023e221cfce69bece/debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339", size = 2076510, upload-time = "2025-04-10T19:46:13.315Z" }, - { url = "https://files.pythonhosted.org/packages/69/cd/4fc391607bca0996db5f3658762106e3d2427beaef9bfd363fd370a3c054/debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79", size = 3559614, upload-time = "2025-04-10T19:46:14.647Z" }, - { url = "https://files.pythonhosted.org/packages/1a/42/4e6d2b9d63e002db79edfd0cb5656f1c403958915e0e73ab3e9220012eec/debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987", size = 5208588, upload-time = "2025-04-10T19:46:16.233Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/b1/cc9e4e5faadc9d00df1a64a3c2d5c5f4b9df28196c39ada06361c5141f89/debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84", size = 5241043, upload-time = "2025-04-10T19:46:17.768Z" }, - { url = "https://files.pythonhosted.org/packages/67/e8/57fe0c86915671fd6a3d2d8746e40485fd55e8d9e682388fbb3a3d42b86f/debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9", size = 2175064, upload-time = "2025-04-10T19:46:19.486Z" }, - { url = "https://files.pythonhosted.org/packages/3b/97/2b2fd1b1c9569c6764ccdb650a6f752e4ac31be465049563c9eb127a8487/debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2", size = 3132359, upload-time = "2025-04-10T19:46:21.192Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ee/b825c87ed06256ee2a7ed8bab8fb3bb5851293bf9465409fdffc6261c426/debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2", size = 5133269, upload-time = "2025-04-10T19:46:23.047Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a6/6c70cd15afa43d37839d60f324213843174c1d1e6bb616bd89f7c1341bac/debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01", size = 5158156, upload-time = "2025-04-10T19:46:24.521Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2a/ac2df0eda4898f29c46eb6713a5148e6f8b2b389c8ec9e425a4a1d67bf07/debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84", size = 2501268, upload-time = "2025-04-10T19:46:26.044Z" }, - { url = "https://files.pythonhosted.org/packages/10/53/0a0cb5d79dd9f7039169f8bf94a144ad3efa52cc519940b3b7dde23bcb89/debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826", size = 4221077, upload-time = "2025-04-10T19:46:27.464Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d5/84e01821f362327bf4828728aa31e907a2eca7c78cd7c6ec062780d249f8/debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f", size = 5255127, upload-time = "2025-04-10T19:46:29.467Z" }, - { url = "https://files.pythonhosted.org/packages/33/16/1ed929d812c758295cac7f9cf3dab5c73439c83d9091f2d91871e648093e/debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f", size = 5297249, upload-time = "2025-04-10T19:46:31.538Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/395c792b243f2367d84202dc33689aa3d910fb9826a7491ba20fc9e261f5/debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f", size = 2485676, upload-time = "2025-04-10T19:46:32.96Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f1/6f2ee3f991327ad9e4c2f8b82611a467052a0fb0e247390192580e89f7ff/debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15", size = 4217514, upload-time = 
"2025-04-10T19:46:34.336Z" }, - { url = "https://files.pythonhosted.org/packages/79/28/b9d146f8f2dc535c236ee09ad3e5ac899adb39d7a19b49f03ac95d216beb/debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e", size = 5254756, upload-time = "2025-04-10T19:46:36.199Z" }, - { url = "https://files.pythonhosted.org/packages/e0/62/a7b4a57013eac4ccaef6977966e6bec5c63906dd25a86e35f155952e29a1/debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e", size = 5297119, upload-time = "2025-04-10T19:46:38.141Z" }, - { url = "https://files.pythonhosted.org/packages/85/6f/96ba96545f55b6a675afa08c96b42810de9b18c7ad17446bbec82762127a/debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f", size = 2077696, upload-time = "2025-04-10T19:46:46.817Z" }, - { url = "https://files.pythonhosted.org/packages/fa/84/f378a2dd837d94de3c85bca14f1db79f8fcad7e20b108b40d59da56a6d22/debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea", size = 3554846, upload-time = "2025-04-10T19:46:48.72Z" }, - { url = "https://files.pythonhosted.org/packages/db/52/88824fe5d6893f59933f664c6e12783749ab537a2101baf5c713164d8aa2/debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d", size = 5209350, upload-time = "2025-04-10T19:46:50.284Z" }, - { url = "https://files.pythonhosted.org/packages/41/35/72e9399be24a04cb72cfe1284572c9fcd1d742c7fa23786925c18fa54ad8/debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123", size = 5241852, upload-time = "2025-04-10T19:46:52.022Z" }, - { url = "https://files.pythonhosted.org/packages/97/1a/481f33c37ee3ac8040d3d51fc4c4e4e7e61cb08b8bc8971d6032acc2279f/debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20", size = 5256230, upload-time = "2025-04-10T19:46:54.077Z" }, -] - -[[package]] -name = "decorator" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = 
"sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, -] - -[[package]] -name = "docutils" -version = "0.21.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, -] - -[[package]] -name = "executing" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755", size = 978693, upload-time = "2025-01-22T15:41:29.403Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa", size = 26702, upload-time = "2025-01-22T15:41:25.929Z" }, -] - -[[package]] -name = "fastjsonschema" -version = "2.21.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/50/4b769ce1ac4071a1ef6d86b1a3fb56cdc3a37615e8c5519e1af96cdac366/fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4", size = 373939, upload-time = "2024-12-02T10:55:15.133Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/90/2b/0817a2b257fe88725c25589d89aec060581aabf668707a8d03b2e9e0cb2a/fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667", size = 23924, upload-time = "2024-12-02T10:55:07.599Z" }, -] - -[[package]] -name = "fixit" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "libcst" }, - { name = "moreorless" }, - { name = "packaging" }, - { name = 
"tomli", marker = "python_full_version < '3.11'" }, - { name = "trailrunner" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/49/22/fc513f039c17024fde3fe2ebe3bc93e4972f7717694613b1bc109068bfc1/fixit-2.1.0.tar.gz", hash = "sha256:b31665cb6491d659d8dfef5a6078a7e9f786e299826636d03d6bd91b6f71e95b", size = 219817, upload-time = "2023-10-26T02:37:14.329Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/48/91/635a1d52f36a546449031c63e54220c8a71e898bcd9cbccfe1181fc1812c/fixit-2.1.0-py3-none-any.whl", hash = "sha256:76b286c0abb9d6a63e5c7d1b6673a041c4356e93d70472e94a9ad2c447da7753", size = 83583, upload-time = "2023-10-26T02:37:12.574Z" }, -] - -[[package]] -name = "flake8" -version = "7.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mccabe" }, - { name = "pycodestyle" }, - { name = "pyflakes" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/c4/5842fc9fc94584c455543540af62fd9900faade32511fab650e9891ec225/flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426", size = 48177, upload-time = "2025-03-29T20:08:39.329Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/83/5c/0627be4c9976d56b1217cb5187b7504e7fd7d3503f8bfd312a04077bd4f7/flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343", size = 57786, upload-time = "2025-03-29T20:08:37.902Z" }, -] - -[[package]] -name = "fqdn" -version = "1.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015, upload-time = "2021-03-11T07:16:29.08Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 
78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - -[[package]] -name = "hypothesis" -version = "6.135.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "sortedcontainers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ae/59/7022ef95715701cd90ac0cf04582e3507492ab200f370fd7ef12d80dda75/hypothesis-6.135.4.tar.gz", hash = "sha256:c63f6fc56840558c5c5e2441dd91fad1709da60bde756b816d4b89944e50a52f", size = 451895, upload-time = "2025-06-09T02:31:38.766Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/d4/25b3a9f35199eb1904967ca3e6db4afd636911fa39695760b0afac84f38a/hypothesis-6.135.4-py3-none-any.whl", hash = "sha256:6a3b13ce35d43e14aaf6a6ca4cc411e5342be5d05b77977499d07cf6a61e6e71", size = 517950, upload-time = "2025-06-09T02:31:34.463Z" }, -] - -[package.optional-dependencies] -lark = [ - { name = "lark" }, -] - -[[package]] -name = "hypothesmith" -version = "0.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "hypothesis", extra = ["lark"] }, - { name = "libcst" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e3/f6/1a64114dee6c46985482c35bdbc12025db59973a0225eec47ac4d306030f/hypothesmith-0.3.3.tar.gz", hash = "sha256:96c14802d6c8e85d8975264176878db54b28d2ed921fdbfedc2e6b8ce3c81716", size = 25529, upload-time = "2024-02-16T20:21:24.511Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/bc/78dcf42c6eaaf7d628f061f1e533a596f5bca2a53be2b714adc5d370d48e/hypothesmith-0.3.3-py3-none-any.whl", hash = "sha256:fdb0172f9de97d09450da40da7da083fdd118bcd2f88b1a2289413d2d496b1b1", size = 19247, upload-time = "2024-02-16T20:20:47.059Z" }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = 
"sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, -] - -[[package]] -name = "importlib-metadata" -version = "8.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, -] - -[[package]] -name = "intervaltree" -version = "3.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "sortedcontainers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/fb/396d568039d21344639db96d940d40eb62befe704ef849b27949ded5c3bb/intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d", size = 32861, upload-time = "2020-08-03T08:01:11.392Z" } - -[[package]] -name = "ipykernel" -version = "6.29.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "appnope", marker = "sys_platform == 'darwin'" }, - { name = "comm" }, - { name = "debugpy" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "matplotlib-inline" }, - { name = "nest-asyncio" }, - { name = "packaging" }, - { name = "psutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e9/5c/67594cb0c7055dc50814b21731c22a601101ea3b1b50a9a1b090e11f5d0f/ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215", size = 163367, upload-time = "2024-07-01T14:07:22.543Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/5c/368ae6c01c7628438358e6d337c19b05425727fbb221d2a3c4303c372f42/ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5", size = 117173, upload-time = "2024-07-01T14:07:19.603Z" }, -] - -[[package]] -name = "ipython" -version = "8.18.1" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version < '3.10'" }, - { name = 
"exceptiongroup", marker = "python_full_version < '3.10'" }, - { name = "jedi", marker = "python_full_version < '3.10'" }, - { name = "matplotlib-inline", marker = "python_full_version < '3.10'" }, - { name = "pexpect", marker = "python_full_version < '3.10' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "stack-data", marker = "python_full_version < '3.10'" }, - { name = "traitlets", marker = "python_full_version < '3.10'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/b9/3ba6c45a6df813c09a48bac313c22ff83efa26cbb55011218d925a46e2ad/ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27", size = 5486330, upload-time = "2023-11-27T09:58:34.596Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/6b/d9fdcdef2eb6a23f391251fde8781c38d42acd82abe84d054cb74f7863b0/ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397", size = 808161, upload-time = "2023-11-27T09:58:30.538Z" }, -] - -[[package]] -name = "ipython" -version = "8.37.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version == '3.10.*'" }, - { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, - { name = "jedi", marker = "python_full_version == '3.10.*'" }, - { name = "matplotlib-inline", marker = "python_full_version == '3.10.*'" }, - { name = "pexpect", marker = "python_full_version == '3.10.*' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version == '3.10.*'" }, - { name = "pygments", marker = "python_full_version == '3.10.*'" }, - { name = "stack-data", marker = "python_full_version == '3.10.*'" }, - { name = "traitlets", marker = "python_full_version == '3.10.*'" }, - { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/85/31/10ac88f3357fc276dc8a64e8880c82e80e7459326ae1d0a211b40abf6665/ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216", size = 5606088, upload-time = "2025-05-31T16:39:09.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/d0/274fbf7b0b12643cbbc001ce13e6a5b1607ac4929d1b11c72460152c9fc3/ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2", size = 831864, upload-time = "2025-05-31T16:39:06.38Z" }, -] - -[[package]] -name = "ipython" -version = "9.3.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version >= '3.11'" }, - { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.11'" }, - { name = "jedi", marker = "python_full_version >= '3.11'" }, - { name = "matplotlib-inline", marker = 
"python_full_version >= '3.11'" }, - { name = "pexpect", marker = "python_full_version >= '3.11' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.11'" }, - { name = "pygments", marker = "python_full_version >= '3.11'" }, - { name = "stack-data", marker = "python_full_version >= '3.11'" }, - { name = "traitlets", marker = "python_full_version >= '3.11'" }, - { name = "typing-extensions", marker = "python_full_version == '3.11.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dc/09/4c7e06b96fbd203e06567b60fb41b06db606b6a82db6db7b2c85bb72a15c/ipython-9.3.0.tar.gz", hash = "sha256:79eb896f9f23f50ad16c3bc205f686f6e030ad246cc309c6279a242b14afe9d8", size = 4426460, upload-time = "2025-05-31T16:34:55.678Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/99/9ed3d52d00f1846679e3aa12e2326ac7044b5e7f90dc822b60115fa533ca/ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04", size = 605320, upload-time = "2025-05-31T16:34:52.154Z" }, -] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pygments", marker = "python_full_version >= '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, -] - -[[package]] -name = "ipywidgets" -version = "8.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "comm" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "jupyterlab-widgets" }, - { name = "traitlets" }, - { name = "widgetsnbextension" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3e/48/d3dbac45c2814cb73812f98dd6b38bbcc957a4e7bb31d6ea9c03bf94ed87/ipywidgets-8.1.7.tar.gz", hash = "sha256:15f1ac050b9ccbefd45dccfbb2ef6bed0029d8278682d569d71b8dd96bee0376", size = 116721, upload-time = "2025-05-05T12:42:03.489Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/6a/9166369a2f092bd286d24e6307de555d63616e8ddb373ebad2b5635ca4cd/ipywidgets-8.1.7-py3-none-any.whl", hash = "sha256:764f2602d25471c213919b8a1997df04bef869251db4ca8efba1b76b1bd9f7bb", size = 139806, upload-time = "2025-05-05T12:41:56.833Z" }, -] - -[[package]] -name = "isoduration" -version = "20.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "arrow" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = 
"sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649, upload-time = "2020-11-01T11:00:00.312Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321, upload-time = "2020-11-01T10:59:58.02Z" }, -] - -[[package]] -name = "jedi" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "parso" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, -] - -[[package]] -name = "json5" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/be/c6c745ec4c4539b25a278b70e29793f10382947df0d9efba2fa09120895d/json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a", size = 51907, upload-time = "2025-04-03T16:33:13.201Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/9f/3500910d5a98549e3098807493851eeef2b89cdd3032227558a104dfe926/json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db", size = 36079, upload-time = "2025-04-03T16:33:11.927Z" }, -] - -[[package]] -name = "jsonpointer" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, -] - -[[package]] -name = "jsonschema" -version = "4.24.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "jsonschema-specifications" }, - { name = "referencing" }, - { 
name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, -] - -[package.optional-dependencies] -format-nongpl = [ - { name = "fqdn" }, - { name = "idna" }, - { name = "isoduration" }, - { name = "jsonpointer" }, - { name = "rfc3339-validator" }, - { name = "rfc3986-validator" }, - { name = "uri-template" }, - { name = "webcolors" }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, -] - -[[package]] -name = "jupyter" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ipykernel" }, - { name = "ipywidgets" }, - { name = "jupyter-console" }, - { name = "jupyterlab" }, - { name = "nbconvert" }, - { name = "notebook" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959, upload-time = "2024-08-30T07:15:48.299Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657, upload-time = "2024-08-30T07:15:47.045Z" }, -] - -[[package]] -name = "jupyter-client" -version = "8.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jupyter-core" }, - { name = "python-dateutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, -] - 
-[[package]] -name = "jupyter-console" -version = "6.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ipykernel" }, - { name = "ipython", version = "8.18.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.37.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "prompt-toolkit" }, - { name = "pygments" }, - { name = "pyzmq" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363, upload-time = "2023-03-06T14:13:31.02Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510, upload-time = "2023-03-06T14:13:28.229Z" }, -] - -[[package]] -name = "jupyter-core" -version = "5.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "platformdirs" }, - { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923, upload-time = "2025-05-27T07:38:16.655Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880, upload-time = "2025-05-27T07:38:15.137Z" }, -] - -[[package]] -name = "jupyter-events" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jsonschema", extra = ["format-nongpl"] }, - { name = "packaging" }, - { name = "python-json-logger" }, - { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, - { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, - { name = "referencing" }, - { name = "rfc3339-validator" }, - { name = "rfc3986-validator" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196, upload-time = "2025-02-03T17:23:41.485Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430, upload-time = "2025-02-03T17:23:38.643Z" }, -] - -[[package]] -name = "jupyter-lsp" -version = "2.2.5" -source = { registry 
= "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jupyter-server" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/85/b4/3200b0b09c12bc3b72d943d923323c398eff382d1dcc7c0dbc8b74630e40/jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001", size = 48741, upload-time = "2024-04-09T17:59:44.918Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/e0/7bd7cff65594fd9936e2f9385701e44574fc7d721331ff676ce440b14100/jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da", size = 69146, upload-time = "2024-04-09T17:59:43.388Z" }, -] - -[[package]] -name = "jupyter-server" -version = "2.16.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "argon2-cffi" }, - { name = "jinja2" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "jupyter-events" }, - { name = "jupyter-server-terminals" }, - { name = "nbconvert" }, - { name = "nbformat" }, - { name = "overrides" }, - { name = "packaging" }, - { name = "prometheus-client" }, - { name = "pywinpty", marker = "os_name == 'nt'" }, - { name = "pyzmq" }, - { name = "send2trash" }, - { name = "terminado" }, - { name = "tornado" }, - { name = "traitlets" }, - { name = "websocket-client" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/41/c8/ba2bbcd758c47f1124c4ca14061e8ce60d9c6fd537faee9534a95f83521a/jupyter_server-2.16.0.tar.gz", hash = "sha256:65d4b44fdf2dcbbdfe0aa1ace4a842d4aaf746a2b7b168134d5aaed35621b7f6", size = 728177, upload-time = "2025-05-12T16:44:46.245Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/1f/5ebbced977171d09a7b0c08a285ff9a20aafb9c51bde07e52349ff1ddd71/jupyter_server-2.16.0-py3-none-any.whl", hash = "sha256:3d8db5be3bc64403b1c65b400a1d7f4647a5ce743f3b20dbdefe8ddb7b55af9e", size = 386904, upload-time = "2025-05-12T16:44:43.335Z" }, -] - -[[package]] -name = "jupyter-server-terminals" -version = "0.5.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywinpty", marker = "os_name == 'nt'" }, - { name = "terminado" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430, upload-time = "2024-03-12T14:37:03.049Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656, upload-time = "2024-03-12T14:37:00.708Z" }, -] - -[[package]] -name = "jupyterlab" -version = "4.4.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "async-lru" }, - { name = "httpx" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "ipykernel" }, - { name = "jinja2" }, - { name = "jupyter-core" }, - { name = "jupyter-lsp" }, - { name = "jupyter-server" }, - { name = "jupyterlab-server" }, - { name = "notebook-shim" }, - { name = "packaging" }, - { name = "setuptools" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/d3/2d/d1678dcf2db66cb4a38a80d9e5fcf48c349f3ac12f2d38882993353ae768/jupyterlab-4.4.3.tar.gz", hash = "sha256:a94c32fd7f8b93e82a49dc70a6ec45a5c18281ca2a7228d12765e4e210e5bca2", size = 23032376, upload-time = "2025-05-26T11:18:00.996Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/4d/7dd5c2ffbb960930452a031dc8410746183c924580f2ab4e68ceb5b3043f/jupyterlab-4.4.3-py3-none-any.whl", hash = "sha256:164302f6d4b6c44773dfc38d585665a4db401a16e5296c37df5cba63904fbdea", size = 12295480, upload-time = "2025-05-26T11:17:56.607Z" }, -] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" }, -] - -[[package]] -name = "jupyterlab-server" -version = "2.27.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "babel" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jinja2" }, - { name = "json5" }, - { name = "jsonschema" }, - { name = "jupyter-server" }, - { name = "packaging" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0a/c9/a883ce65eb27905ce77ace410d83587c82ea64dc85a48d1f7ed52bcfa68d/jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4", size = 76173, upload-time = "2024-07-16T17:02:04.149Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/09/2032e7d15c544a0e3cd831c51d77a8ca57f7555b2e1b2922142eddb02a84/jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", size = 59700, upload-time = "2024-07-16T17:02:01.115Z" }, -] - -[[package]] -name = "jupyterlab-widgets" -version = "3.0.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b9/7d/160595ca88ee87ac6ba95d82177d29ec60aaa63821d3077babb22ce031a5/jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b", size = 213149, upload-time = "2025-05-05T12:32:31.004Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/6a/ca128561b22b60bd5a0c4ea26649e68c8556b82bc70a0c396eebc977fe86/jupyterlab_widgets-3.0.15-py3-none-any.whl", hash = "sha256:d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c", size = 216571, upload-time = "2025-05-05T12:32:29.534Z" }, -] - -[[package]] -name = "lark" -version = "1.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/60/bc7622aefb2aee1c0b4ba23c1446d3e30225c8770b38d7aedbfb65ca9d5a/lark-1.2.2.tar.gz", hash = "sha256:ca807d0162cd16cef15a8feecb862d7319e7a09bdb13aef927968e45040fed80", size = 252132, upload-time = "2024-08-13T19:49:00.652Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2d/00/d90b10b962b4277f5e64a78b6609968859ff86889f5b898c1a778c06ec00/lark-1.2.2-py3-none-any.whl", hash = "sha256:c2276486b02f0f1b90be155f2c8ba4a8e194d42775786db622faccd652d8e80c", size = 111036, upload-time = "2024-08-13T19:48:58.603Z" }, -] - -[[package]] -name = "libcst" -source = { editable = "." } -dependencies = [ - { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.13'" }, - { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, - { name = "pyyaml-ft", marker = "python_full_version == '3.13.*'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] - -[package.dev-dependencies] -dev = [ - { name = "black" }, - { name = "build" }, - { name = "coverage", extra = ["toml"] }, - { name = "fixit" }, - { name = "flake8" }, - { name = "hypothesis" }, - { name = "hypothesmith" }, - { name = "maturin" }, - { name = "poethepoet" }, - { name = "prompt-toolkit" }, - { name = "pyre-check", marker = "sys_platform != 'win32'" }, - { name = "setuptools-rust" }, - { name = "setuptools-scm" }, - { name = "slotscheck" }, - { name = "ufmt" }, - { name = "usort" }, -] -docs = [ - { name = "black" }, - { name = "build" }, - { name = "coverage", extra = ["toml"] }, - { name = "fixit" }, - { name = "flake8" }, - { name = "hypothesis" }, - { name = "hypothesmith" }, - { name = "jinja2" }, - { name = "jupyter" }, - { name = "maturin" }, - { name = "nbsphinx" }, - { name = "poethepoet" }, - { name = "prompt-toolkit" }, - { name = "pyre-check", marker = "sys_platform != 'win32'" }, - { name = "setuptools-rust" }, - { name = "setuptools-scm" }, - { name = "slotscheck" }, - { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "sphinx-rtd-theme" }, - { name = "ufmt" }, - { name = "usort" }, -] - -[package.metadata] -requires-dist = [ - { name = "pyyaml", marker = "python_full_version < '3.13'", specifier = ">=5.2" }, - { name = "pyyaml", marker = "python_full_version >= '3.14'", specifier = ">=6.0.3" }, - { name = "pyyaml-ft", marker = "python_full_version == '3.13.*'", specifier = ">=8.0.0" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "black", specifier = "==25.1.0" }, - { name = "build", specifier = ">=0.10.0" }, - { name = "coverage", extras = ["toml"], specifier = ">=4.5.4" }, - { name = "fixit", specifier = "==2.1.0" }, - { name = "flake8", specifier = "==7.2.0" }, - { name = "hypothesis", specifier = ">=4.36.0" }, - { name = "hypothesmith", specifier = ">=0.0.4" }, - { name = "maturin", specifier = ">=1.7.0,<1.8" }, - { name = "poethepoet", specifier = ">=0.35.0" }, - { name = "prompt-toolkit", specifier = ">=2.0.9" }, - { name = "pyre-check", marker = "sys_platform != 'win32'", specifier = "==0.9.18" }, - { name = "setuptools-rust", specifier = ">=1.5.2" }, - { name = "setuptools-scm", specifier = ">=6.0.1" }, - { name = "slotscheck", specifier = ">=0.7.1" }, - { name = "ufmt", specifier = "==2.8.0" }, - { name = "usort", specifier = "==1.0.8.post1" }, -] -docs = [ - { name = "black", specifier = "==25.1.0" }, - { name = "build", specifier = ">=0.10.0" }, - { name = "coverage", extras 
= ["toml"], specifier = ">=4.5.4" }, - { name = "fixit", specifier = "==2.1.0" }, - { name = "flake8", specifier = "==7.2.0" }, - { name = "hypothesis", specifier = ">=4.36.0" }, - { name = "hypothesmith", specifier = ">=0.0.4" }, - { name = "jinja2", specifier = "==3.1.6" }, - { name = "jupyter", specifier = ">=1.0.0" }, - { name = "maturin", specifier = ">=1.7.0,<1.8" }, - { name = "nbsphinx", specifier = ">=0.4.2" }, - { name = "poethepoet", specifier = ">=0.35.0" }, - { name = "prompt-toolkit", specifier = ">=2.0.9" }, - { name = "pyre-check", marker = "sys_platform != 'win32'", specifier = "==0.9.18" }, - { name = "setuptools-rust", specifier = ">=1.5.2" }, - { name = "setuptools-scm", specifier = ">=6.0.1" }, - { name = "slotscheck", specifier = ">=0.7.1" }, - { name = "sphinx", specifier = ">=5.1.1" }, - { name = "sphinx-rtd-theme", specifier = ">=0.4.3" }, - { name = "ufmt", specifier = "==2.8.0" }, - { name = "usort", specifier = "==1.0.8.post1" }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, - { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, - { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, - { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, - { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, - { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, - { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = 
"sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, - { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, - { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, - { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, - { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, - { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, - { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, - { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, - { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, - { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, - { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, -] - -[[package]] -name = "marshmallow" -version = "3.26.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "packaging" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" }, -] - -[[package]] -name = "maturin" -version = "1.7.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/1e/085ddc0e5b08ae7af7a743a0dd6ed06b22a1332288488f1a333137885150/maturin-1.7.8.tar.gz", hash = "sha256:649c6ef3f0fa4c5f596140d761dc5a4d577c485cc32fb5b9b344a8280352880d", size = 195704, upload-time = "2024-12-04T11:38:23.268Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/ed/c8bb26e91c879e418ae1b01630722ed20b6fe0e6755be8d538d83666f136/maturin-1.7.8-py3-none-linux_armv6l.whl", hash = "sha256:c6950fd2790acd93265e1501cea66f9249cff19724654424ca75a3b17ebb315b", size = 7515691, upload-time = "2024-12-04T11:37:55.443Z" }, - { url = "https://files.pythonhosted.org/packages/38/7a/573f969315f0b92a09a0a565d45e98812c87796e2e19a7856159ab234faf/maturin-1.7.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f98288d5c382bacf0c076871dfd50c38f1eb2248f417551e98dd6f47f6ee8afa", size = 14434454, upload-time = "2024-12-04T11:37:58.448Z" }, - { url = "https://files.pythonhosted.org/packages/a6/17/46834841fbf19231487f185e68b95ca348cc05cce49be8787e0bc7e9dc47/maturin-1.7.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b2d4e0f674ca29864e6b86c2eb9fee8236d1c7496c25f7300e34229272468f4c", size = 7509122, upload-time = "2024-12-04T11:38:01.355Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8f/bf8b4871eb390a4baef2e0bb5016852c7c0311a9772e2945534cfa2ee40e/maturin-1.7.8-py3-none-manylinux_2_12_i686.manylinux2010_i686.musllinux_1_1_i686.whl", hash = "sha256:6cafb17bf57822bdc04423d9e3e766d42918d474848fe9833e397267514ba891", size = 7598870, upload-time = "2024-12-04T11:38:03.708Z" }, - { url = "https://files.pythonhosted.org/packages/dc/43/c842be67a7c59568082345249b956138ae93d0b2474fb41c186ce26d05e1/maturin-1.7.8-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:2b2bdee0c3a84696b3a809054c43ead1a04b7b3321cbd5b8f5676e4ba4691d0f", size = 7932310, upload-time = "2024-12-04T11:38:05.463Z" }, - { url = "https://files.pythonhosted.org/packages/12/12/42435d05f2d6c75eb621751e6f021d29eb34d18e3b9c5c94d828744c2d54/maturin-1.7.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:b8188b71259fc2bc568d9c8acc186fcfed96f42539bcb55b8e6f4ec26e411f37", size = 7321964, upload-time = 
"2024-12-04T11:38:07.143Z" }, - { url = "https://files.pythonhosted.org/packages/b4/26/f3272ee985ebf9b3e8c4cd4f4efb022af1e12c9f53aed0dcc9a255399f4e/maturin-1.7.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:a4f58c2a53c2958a1bf090960b08b28e676136cd88ac2f5dfdcf1b14ea54ec06", size = 7408613, upload-time = "2024-12-04T11:38:09.814Z" }, - { url = "https://files.pythonhosted.org/packages/36/7d/be27bcc7d3ac6e6c2136a8ec0cc56f227a292d6cfdde55e095b6c0aa24a9/maturin-1.7.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:c5d6c0c631d1fc646cd3834795e6cfd72ab4271d289df7e0f911261a02bec75f", size = 9496974, upload-time = "2024-12-04T11:38:11.618Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e8/0d7323e9a31c11edf69c4473d73eca74803ce3e2390abf8ae3ac7eb10b04/maturin-1.7.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c23664d19dadcbf800ef70f26afb2e0485a985c62889930934f019c565534c23", size = 10828401, upload-time = "2024-12-04T11:38:14.42Z" }, - { url = "https://files.pythonhosted.org/packages/7e/82/5080e052c0d8c9872f6d4b94cae84c17ed7f2ea270d709210ea6445b655f/maturin-1.7.8-py3-none-win32.whl", hash = "sha256:403eebf1afa6f19b49425f089e39c53b8e597bc86a47f3a76e828dc78d27fa80", size = 6845240, upload-time = "2024-12-04T11:38:17.162Z" }, - { url = "https://files.pythonhosted.org/packages/6d/c9/9b162361ded893f36038c2f8ac6a972ec441c11df8d17c440997eb28090f/maturin-1.7.8-py3-none-win_amd64.whl", hash = "sha256:1ce48d007438b895f8665314b6748ac0dab31e4f32049a60b52281dd2dccbdde", size = 7762332, upload-time = "2024-12-04T11:38:19.445Z" }, - { url = "https://files.pythonhosted.org/packages/fa/40/46d4742db742f69a7fe0054cd7c82bc79b2d70cb8c91f7e737e75c28a5f3/maturin-1.7.8-py3-none-win_arm64.whl", hash = "sha256:cc92a62953205e8945b6cfe6943d6a8576a4442d30d9c67141f944f4f4640e62", size = 6501353, upload-time = "2024-12-04T11:38:21.713Z" }, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658, upload-time = "2022-01-24T01:14:51.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350, upload-time = "2022-01-24T01:14:49.62Z" }, -] - -[[package]] -name = "mistune" -version = "3.1.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c4/79/bda47f7dd7c3c55770478d6d02c9960c430b0cf1773b72366ff89126ea31/mistune-3.1.3.tar.gz", hash = "sha256:a7035c21782b2becb6be62f8f25d3df81ccb4d6fa477a6525b15af06539f02a0", size = 94347, upload-time = "2025-03-19T14:27:24.955Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/01/4d/23c4e4f09da849e127e9f123241946c23c1e30f45a88366879e064211815/mistune-3.1.3-py3-none-any.whl", hash = "sha256:1a32314113cff28aa6432e99e522677c8587fd83e3d51c29b82a52409c842bd9", size = 53410, upload-time = "2025-03-19T14:27:23.451Z" }, -] - -[[package]] -name = "moreorless" -version = "0.5.0" -source = { registry = "https://pypi.org/simple" 
} -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8d/85/2e4999ac4a21ab3c5f31e2a48e0989a80be3afc512a7983e3253615983d4/moreorless-0.5.0.tar.gz", hash = "sha256:560a04f85006fccd74feaa4b6213a446392ff7b5ec0194a5464b6c30f182fa33", size = 14093, upload-time = "2025-05-04T22:29:59.006Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/2e/9ea80ca55b73530b7639c6f146a58f636ddfe5a852ad467a44fe3e80d809/moreorless-0.5.0-py3-none-any.whl", hash = "sha256:66228870cd2f14bad5c3c3780aa71e29d3b2d9b5a01c03bfbf105efd4f668ecf", size = 14380, upload-time = "2025-05-04T22:29:57.417Z" }, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, -] - -[[package]] -name = "nbclient" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "nbformat" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, -] - -[[package]] -name = "nbconvert" -version = "7.16.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beautifulsoup4" }, - { name = "bleach", extra = ["css"] }, - { name = "defusedxml" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jinja2" }, - { name = "jupyter-core" }, - { name = "jupyterlab-pygments" }, - { name = "markupsafe" }, - { name = "mistune" }, - { name = "nbclient" }, - { name = "nbformat" }, - { name = "packaging" }, - { name = "pandocfilters" }, - { name = "pygments" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = 
"sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, -] - -[[package]] -name = "nbformat" -version = "5.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fastjsonschema" }, - { name = "jsonschema" }, - { name = "jupyter-core" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, -] - -[[package]] -name = "nbsphinx" -version = "0.9.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "jinja2" }, - { name = "nbconvert" }, - { name = "nbformat" }, - { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1e/84/b1856b7651ac34e965aa567a158714c7f3bd42a1b1ce76bf423ffb99872c/nbsphinx-0.9.7.tar.gz", hash = "sha256:abd298a686d55fa894ef697c51d44f24e53aa312dadae38e82920f250a5456fe", size = 180479, upload-time = "2025-03-03T19:46:08.069Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/49/2d/8c8e635bcc6757573d311bb3c5445426382f280da32b8cd6d82d501ef4a4/nbsphinx-0.9.7-py3-none-any.whl", hash = "sha256:7292c3767fea29e405c60743eee5393682a83982ab202ff98f5eb2db02629da8", size = 31660, upload-time = "2025-03-03T19:46:06.581Z" }, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, -] - -[[package]] -name = "notebook" -version = "7.4.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-server" }, - { name = "jupyterlab" }, - { name = "jupyterlab-server" }, - { name = "notebook-shim" }, - { name = "tornado" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dc/21/4f83b15e483da4f4f63928edd0cb08b6e7d33f8a15c23b116a90c44c6235/notebook-7.4.3.tar.gz", hash = "sha256:a1567481cd3853f2610ee0ecf5dfa12bb508e878ee8f92152c134ef7f0568a76", size = 13881668, upload-time = "2025-05-26T14:27:21.656Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/1b/16c809d799e3ddd7a97c8b43734f79624b74ddef9707e7d92275a13777bc/notebook-7.4.3-py3-none-any.whl", hash = 
"sha256:9cdeee954e04101cadb195d90e2ab62b7c9286c1d4f858bf3bb54e40df16c0c3", size = 14286402, upload-time = "2025-05-26T14:27:17.339Z" }, -] - -[[package]] -name = "notebook-shim" -version = "0.2.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-server" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167, upload-time = "2024-02-14T23:35:18.353Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307, upload-time = "2024-02-14T23:35:16.286Z" }, -] - -[[package]] -name = "overrides" -version = "7.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, -] - -[[package]] -name = "packaging" -version = "25.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, -] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" }, -] - -[[package]] -name = "parso" -version = "0.8.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d", size = 400609, upload-time = "2024-04-05T09:43:55.897Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" }, -] - -[[package]] -name = "pastel" -version = "0.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ptyprocess" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, -] - -[[package]] -name = "platformdirs" -version = "4.3.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, -] - -[[package]] -name = "poethepoet" -version = "0.35.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pastel" }, - { name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" }, - { name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" }, 
- { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d6/b1/d4f4361b278fae10f6074675385ce3acf53c647f8e6eeba22c652f8ba985/poethepoet-0.35.0.tar.gz", hash = "sha256:b396ae862d7626e680bbd0985b423acf71634ce93a32d8b5f38340f44f5fbc3e", size = 66006, upload-time = "2025-06-09T12:58:18.849Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/08/abc2d7e2400dd8906e3208f9b88ac610f097d7ee0c7a1fa4a157b49a9e86/poethepoet-0.35.0-py3-none-any.whl", hash = "sha256:bed5ae1fd63f179dfa67aabb93fa253d79695c69667c927d8b24ff378799ea75", size = 87164, upload-time = "2025-06-09T12:58:17.084Z" }, -] - -[[package]] -name = "prometheus-client" -version = "0.22.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" }, -] - -[[package]] -name = "prompt-toolkit" -version = "3.0.51" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, -] - -[[package]] -name = "psutil" -version = "7.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, -] - -[[package]] -name = "pycodestyle" -version = "2.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/04/6e/1f4a62078e4d95d82367f24e685aef3a672abfd27d1a868068fed4ed2254/pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae", size = 39312, upload-time = "2025-03-29T17:33:30.669Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/be/b00116df1bfb3e0bb5b45e29d604799f7b91dd861637e4d448b4e09e6a3e/pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9", size = 31424, upload-time = "2025-03-29T17:33:29.405Z" }, -] - -[[package]] -name = "pycparser" -version = "2.22" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, -] - -[[package]] -name = "pyflakes" -version = "3.3.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/cc/1df338bd7ed1fa7c317081dcf29bf2f01266603b301e6858856d346a12b3/pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b", size = 64175, upload-time = "2025-03-31T13:21:20.34Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/40/b293a4fa769f3b02ab9e387c707c4cbdc34f073f945de0386107d4e669e6/pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a", size = 63164, upload-time = "2025-03-31T13:21:18.503Z" }, -] - -[[package]] -name = "pygments" -version = "2.19.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, -] - -[[package]] -name = "pyproject-hooks" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228, upload-time = "2024-09-29T09:24:13.293Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, -] - -[[package]] -name = "pyre-check" -version = "0.9.18" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "dataclasses-json" }, - { name = "intervaltree" }, - { name = "libcst" }, - { name = "psutil" }, - { name = "pyre-extensions" }, - { name = "tabulate" }, - { name = "testslide" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/02/a92e10ecddce435f794493e18e1c0add477e3c307023525a49cffa299163/pyre-check-0.9.18.tar.gz", hash = 
"sha256:d5eb6db9011a7207189ecd0eaf32951e46cb0769c0f96a78fd0b90e633c9df2c", size = 18030825, upload-time = "2023-02-14T00:59:29.593Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/d9/5207ccd2eff3870b44f11c2db6b6d8e31cbcaca973a1b5ba4ac6d2460e41/pyre_check-0.9.18-py3-none-macosx_10_11_x86_64.whl", hash = "sha256:22633f5af3b986d266451a9e386a32414f8868de0a94226c7766f81eb080c59d", size = 19378418, upload-time = "2023-02-14T00:59:24.891Z" }, - { url = "https://files.pythonhosted.org/packages/33/07/865a1ca2a57fc2e9a0f78e005938a465b8a2ff748538fb5a0c1c19cb661f/pyre_check-0.9.18-py3-none-manylinux1_x86_64.whl", hash = "sha256:5659d4dbd6d1dd3052359861d828419f07d1ced1dad4ce4ca79071d252699c26", size = 23486523, upload-time = "2023-02-14T00:59:21.022Z" }, -] - -[[package]] -name = "pyre-extensions" -version = "0.0.32" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, - { name = "typing-inspect" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a7/53/5bc2532536e921c48366ad1047c1344ccef6afa5e84053f0f6e20a453767/pyre_extensions-0.0.32.tar.gz", hash = "sha256:5396715f14ea56c4d5fd0a88c57ca7e44faa468f905909edd7de4ad90ed85e55", size = 10852, upload-time = "2024-11-22T19:26:44.152Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/7a/9812cb8be9828ab688203c5ac5f743c60652887f0c00995a6f6f19f912bd/pyre_extensions-0.0.32-py3-none-any.whl", hash = "sha256:a63ba6883ab02f4b1a9f372ed4eb4a2f4c6f3d74879aa2725186fdfcfe3e5c68", size = 12766, upload-time = "2024-11-22T19:26:42.465Z" }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, -] - -[[package]] -name = "python-json-logger" -version = "3.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/de/d3144a0bceede957f961e975f3752760fbe390d57fbe194baf709d8f1f7b/python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84", size = 16642, upload-time = "2025-03-07T07:08:27.301Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/20/0f2523b9e50a8052bc6a8b732dfc8568abbdc42010aef03a2d750bdab3b2/python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7", size = 15163, upload-time = "2025-03-07T07:08:25.627Z" }, -] - -[[package]] -name = "pywin32" -version = "310" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/da/a5f38fffbba2fb99aa4aa905480ac4b8e83ca486659ac8c95bce47fb5276/pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1", size = 
8848240, upload-time = "2025-03-17T00:55:46.783Z" }, - { url = "https://files.pythonhosted.org/packages/aa/fe/d873a773324fa565619ba555a82c9dabd677301720f3660a731a5d07e49a/pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d", size = 9601854, upload-time = "2025-03-17T00:55:48.783Z" }, - { url = "https://files.pythonhosted.org/packages/3c/84/1a8e3d7a15490d28a5d816efa229ecb4999cdc51a7c30dd8914f669093b8/pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213", size = 8522963, upload-time = "2025-03-17T00:55:50.969Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284, upload-time = "2025-03-17T00:55:53.124Z" }, - { url = "https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748, upload-time = "2025-03-17T00:55:55.203Z" }, - { url = "https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941, upload-time = "2025-03-17T00:55:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239, upload-time = "2025-03-17T00:55:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839, upload-time = "2025-03-17T00:56:00.8Z" }, - { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470, upload-time = "2025-03-17T00:56:02.601Z" }, - { url = "https://files.pythonhosted.org/packages/1c/09/9c1b978ffc4ae53999e89c19c77ba882d9fce476729f23ef55211ea1c034/pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab", size = 8794384, upload-time = "2025-03-17T00:56:04.383Z" }, - { url = "https://files.pythonhosted.org/packages/45/3c/b4640f740ffebadd5d34df35fecba0e1cfef8fde9f3e594df91c28ad9b50/pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e", size = 9503039, upload-time = "2025-03-17T00:56:06.207Z" }, - { url = "https://files.pythonhosted.org/packages/b4/f4/f785020090fb050e7fb6d34b780f2231f302609dc964672f72bfaeb59a28/pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33", size = 8458152, upload-time = "2025-03-17T00:56:07.819Z" }, - { url = "https://files.pythonhosted.org/packages/a2/cd/d09d434630edb6a0c44ad5079611279a67530296cfe0451e003de7f449ff/pywin32-310-cp39-cp39-win32.whl", hash = 
"sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a", size = 8848099, upload-time = "2025-03-17T00:55:42.415Z" }, - { url = "https://files.pythonhosted.org/packages/93/ff/2a8c10315ffbdee7b3883ac0d1667e267ca8b3f6f640d81d43b87a82c0c7/pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475", size = 9602031, upload-time = "2025-03-17T00:55:44.512Z" }, -] - -[[package]] -name = "pywinpty" -version = "2.0.15" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/7c/917f9c4681bb8d34bfbe0b79d36bbcd902651aeab48790df3d30ba0202fb/pywinpty-2.0.15.tar.gz", hash = "sha256:312cf39153a8736c617d45ce8b6ad6cd2107de121df91c455b10ce6bba7a39b2", size = 29017, upload-time = "2025-02-03T21:53:23.265Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/b7/855db919ae526d2628f3f2e6c281c4cdff7a9a8af51bb84659a9f07b1861/pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e", size = 1405161, upload-time = "2025-02-03T21:56:25.008Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ac/6884dcb7108af66ad53f73ef4dad096e768c9203a6e6ce5e6b0c4a46e238/pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca", size = 1405249, upload-time = "2025-02-03T21:55:47.114Z" }, - { url = "https://files.pythonhosted.org/packages/88/e5/9714def18c3a411809771a3fbcec70bffa764b9675afb00048a620fca604/pywinpty-2.0.15-cp312-cp312-win_amd64.whl", hash = "sha256:83a8f20b430bbc5d8957249f875341a60219a4e971580f2ba694fbfb54a45ebc", size = 1405243, upload-time = "2025-02-03T21:56:52.476Z" }, - { url = "https://files.pythonhosted.org/packages/fb/16/2ab7b3b7f55f3c6929e5f629e1a68362981e4e5fed592a2ed1cb4b4914a5/pywinpty-2.0.15-cp313-cp313-win_amd64.whl", hash = "sha256:ab5920877dd632c124b4ed17bc6dd6ef3b9f86cd492b963ffdb1a67b85b0f408", size = 1405020, upload-time = "2025-02-03T21:56:04.753Z" }, - { url = "https://files.pythonhosted.org/packages/7c/16/edef3515dd2030db2795dbfbe392232c7a0f3dc41b98e92b38b42ba497c7/pywinpty-2.0.15-cp313-cp313t-win_amd64.whl", hash = "sha256:a4560ad8c01e537708d2790dbe7da7d986791de805d89dd0d3697ca59e9e4901", size = 1404151, upload-time = "2025-02-03T21:55:53.628Z" }, - { url = "https://files.pythonhosted.org/packages/47/96/90fa02f19b1eff7469ad7bf0ef8efca248025de9f1d0a0b25682d2aacf68/pywinpty-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:d261cd88fcd358cfb48a7ca0700db3e1c088c9c10403c9ebc0d8a8b57aa6a117", size = 1405302, upload-time = "2025-02-03T21:55:40.394Z" }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", - "python_full_version < '3.10'", -] -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = 
"2024-08-06T20:31:40.178Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = 
"https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, - { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, - { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, - { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, - { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, - { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, - { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, -] - -[[package]] -name = "pyyaml" -version = "6.0.3" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= 
'3.14'", -] -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, - { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, - { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, - { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, - { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, - { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, - { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, - { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, - { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, - { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, - { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, - { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, - { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, - { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, - { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, - { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, 
- { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, - { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, - { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, - { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, - { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, - { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, - { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, - 
{ url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, - { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, - { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, - { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, - { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, - { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, - { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, - { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, 
- { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, - { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, - { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, - { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, - { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, - { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, - { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, - { url = 
"https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, - { url = "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", size = 184450, upload-time = "2025-09-25T21:33:00.618Z" }, - { url = "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", size = 174319, upload-time = "2025-09-25T21:33:02.086Z" }, - { url = "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", size = 737631, upload-time = "2025-09-25T21:33:03.25Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", size = 836795, upload-time = "2025-09-25T21:33:05.014Z" }, - { url = "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", size = 750767, upload-time = "2025-09-25T21:33:06.398Z" }, - { url = "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", size = 727982, upload-time = "2025-09-25T21:33:08.708Z" }, - { url = "https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", size = 755677, upload-time = "2025-09-25T21:33:09.876Z" }, - { url = "https://files.pythonhosted.org/packages/29/3d/6f5e0d58bd924fb0d06c3a6bad00effbdae2de5adb5cda5648006ffbd8d3/pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0", size = 142592, upload-time = "2025-09-25T21:33:10.983Z" }, - { url = "https://files.pythonhosted.org/packages/f0/0c/25113e0b5e103d7f1490c0e947e303fe4a696c10b501dea7a9f49d4e876c/pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007", size = 158777, upload-time = "2025-09-25T21:33:15.55Z" }, -] - -[[package]] -name = "pyyaml-ft" -version = "8.0.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/eb/5a0d575de784f9a1f94e2b1288c6886f13f34185e13117ed530f32b6f8a8/pyyaml_ft-8.0.0.tar.gz", hash = "sha256:0c947dce03954c7b5d38869ed4878b2e6ff1d44b08a0d84dc83fdad205ae39ab", size = 141057, upload-time = "2025-06-10T15:32:15.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/ba/a067369fe61a2e57fb38732562927d5bae088c73cb9bb5438736a9555b29/pyyaml_ft-8.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c1306282bc958bfda31237f900eb52c9bedf9b93a11f82e1aab004c9a5657a6", size = 187027, upload-time = "2025-06-10T15:31:48.722Z" }, - { url = "https://files.pythonhosted.org/packages/ad/c5/a3d2020ce5ccfc6aede0d45bcb870298652ac0cf199f67714d250e0cdf39/pyyaml_ft-8.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30c5f1751625786c19de751e3130fc345ebcba6a86f6bddd6e1285342f4bbb69", size = 176146, upload-time = "2025-06-10T15:31:50.584Z" }, - { url = "https://files.pythonhosted.org/packages/e3/bb/23a9739291086ca0d3189eac7cd92b4d00e9fdc77d722ab610c35f9a82ba/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa992481155ddda2e303fcc74c79c05eddcdbc907b888d3d9ce3ff3e2adcfb0", size = 746792, upload-time = "2025-06-10T15:31:52.304Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c2/e8825f4ff725b7e560d62a3609e31d735318068e1079539ebfde397ea03e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cec6c92b4207004b62dfad1f0be321c9f04725e0f271c16247d8b39c3bf3ea42", size = 786772, upload-time = "2025-06-10T15:31:54.712Z" }, - { url = "https://files.pythonhosted.org/packages/35/be/58a4dcae8854f2fdca9b28d9495298fd5571a50d8430b1c3033ec95d2d0e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06237267dbcab70d4c0e9436d8f719f04a51123f0ca2694c00dd4b68c338e40b", size = 778723, upload-time = "2025-06-10T15:31:56.093Z" }, - { url = "https://files.pythonhosted.org/packages/86/ed/fed0da92b5d5d7340a082e3802d84c6dc9d5fa142954404c41a544c1cb92/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a7f332bc565817644cdb38ffe4739e44c3e18c55793f75dddb87630f03fc254", size = 758478, upload-time = "2025-06-10T15:31:58.314Z" }, - { url = "https://files.pythonhosted.org/packages/f0/69/ac02afe286275980ecb2dcdc0156617389b7e0c0a3fcdedf155c67be2b80/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d10175a746be65f6feb86224df5d6bc5c049ebf52b89a88cf1cd78af5a367a8", size = 799159, upload-time = "2025-06-10T15:31:59.675Z" }, - { url = "https://files.pythonhosted.org/packages/4e/ac/c492a9da2e39abdff4c3094ec54acac9747743f36428281fb186a03fab76/pyyaml_ft-8.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:58e1015098cf8d8aec82f360789c16283b88ca670fe4275ef6c48c5e30b22a96", size = 158779, upload-time = "2025-06-10T15:32:01.029Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9b/41998df3298960d7c67653669f37710fa2d568a5fc933ea24a6df60acaf6/pyyaml_ft-8.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5f3e2ceb790d50602b2fd4ec37abbd760a8c778e46354df647e7c5a4ebb", size = 191331, upload-time = "2025-06-10T15:32:02.602Z" }, - { url = "https://files.pythonhosted.org/packages/0f/16/2710c252ee04cbd74d9562ebba709e5a284faeb8ada88fcda548c9191b47/pyyaml_ft-8.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d445bf6ea16bb93c37b42fdacfb2f94c8e92a79ba9e12768c96ecde867046d1", size = 182879, upload-time = "2025-06-10T15:32:04.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/40/ae8163519d937fa7bfa457b6f78439cc6831a7c2b170e4f612f7eda71815/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c56bb46b4fda34cbb92a9446a841da3982cdde6ea13de3fbd80db7eeeab8b49", size = 811277, upload-time = "2025-06-10T15:32:06.214Z" }, - { url = "https://files.pythonhosted.org/packages/f9/66/28d82dbff7f87b96f0eeac79b7d972a96b4980c1e445eb6a857ba91eda00/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab0abb46eb1780da486f022dce034b952c8ae40753627b27a626d803926483b", size = 831650, upload-time = "2025-06-10T15:32:08.076Z" }, - { url = "https://files.pythonhosted.org/packages/e8/df/161c4566facac7d75a9e182295c223060373d4116dead9cc53a265de60b9/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd48d639cab5ca50ad957b6dd632c7dd3ac02a1abe0e8196a3c24a52f5db3f7a", size = 815755, upload-time = "2025-06-10T15:32:09.435Z" }, - { url = "https://files.pythonhosted.org/packages/05/10/f42c48fa5153204f42eaa945e8d1fd7c10d6296841dcb2447bf7da1be5c4/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:052561b89d5b2a8e1289f326d060e794c21fa068aa11255fe71d65baf18a632e", size = 810403, upload-time = "2025-06-10T15:32:11.051Z" }, - { url = "https://files.pythonhosted.org/packages/d5/d2/e369064aa51009eb9245399fd8ad2c562bd0bcd392a00be44b2a824ded7c/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3bb4b927929b0cb162fb1605392a321e3333e48ce616cdcfa04a839271373255", size = 835581, upload-time = "2025-06-10T15:32:12.897Z" }, - { url = "https://files.pythonhosted.org/packages/c0/28/26534bed77109632a956977f60d8519049f545abc39215d086e33a61f1f2/pyyaml_ft-8.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:de04cfe9439565e32f178106c51dd6ca61afaa2907d143835d501d84703d3793", size = 171579, upload-time = "2025-06-10T15:32:14.34Z" }, -] - -[[package]] -name = "pyzmq" -version = "26.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "implementation_name == 'pypy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/11/b9213d25230ac18a71b39b3723494e57adebe36e066397b961657b3b41c1/pyzmq-26.4.0.tar.gz", hash = "sha256:4bd13f85f80962f91a651a7356fe0472791a5f7a92f227822b5acf44795c626d", size = 278293, upload-time = "2025-04-04T12:05:44.049Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/b8/af1d814ffc3ff9730f9a970cbf216b6f078e5d251a25ef5201d7bc32a37c/pyzmq-26.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:0329bdf83e170ac133f44a233fc651f6ed66ef8e66693b5af7d54f45d1ef5918", size = 1339238, upload-time = "2025-04-04T12:03:07.022Z" }, - { url = "https://files.pythonhosted.org/packages/ee/e4/5aafed4886c264f2ea6064601ad39c5fc4e9b6539c6ebe598a859832eeee/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:398a825d2dea96227cf6460ce0a174cf7657d6f6827807d4d1ae9d0f9ae64315", size = 672848, upload-time = "2025-04-04T12:03:08.591Z" }, - { url = "https://files.pythonhosted.org/packages/79/39/026bf49c721cb42f1ef3ae0ee3d348212a7621d2adb739ba97599b6e4d50/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d52d62edc96787f5c1dfa6c6ccff9b581cfae5a70d94ec4c8da157656c73b5b", size = 911299, upload-time = "2025-04-04T12:03:10Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/23/b41f936a9403b8f92325c823c0f264c6102a0687a99c820f1aaeb99c1def/pyzmq-26.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1410c3a3705db68d11eb2424d75894d41cff2f64d948ffe245dd97a9debfebf4", size = 867920, upload-time = "2025-04-04T12:03:11.311Z" }, - { url = "https://files.pythonhosted.org/packages/c1/3e/2de5928cdadc2105e7c8f890cc5f404136b41ce5b6eae5902167f1d5641c/pyzmq-26.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7dacb06a9c83b007cc01e8e5277f94c95c453c5851aac5e83efe93e72226353f", size = 862514, upload-time = "2025-04-04T12:03:13.013Z" }, - { url = "https://files.pythonhosted.org/packages/ce/57/109569514dd32e05a61d4382bc88980c95bfd2f02e58fea47ec0ccd96de1/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6bab961c8c9b3a4dc94d26e9b2cdf84de9918931d01d6ff38c721a83ab3c0ef5", size = 1204494, upload-time = "2025-04-04T12:03:14.795Z" }, - { url = "https://files.pythonhosted.org/packages/aa/02/dc51068ff2ca70350d1151833643a598625feac7b632372d229ceb4de3e1/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a5c09413b924d96af2aa8b57e76b9b0058284d60e2fc3730ce0f979031d162a", size = 1514525, upload-time = "2025-04-04T12:03:16.246Z" }, - { url = "https://files.pythonhosted.org/packages/48/2a/a7d81873fff0645eb60afaec2b7c78a85a377af8f1d911aff045d8955bc7/pyzmq-26.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d489ac234d38e57f458fdbd12a996bfe990ac028feaf6f3c1e81ff766513d3b", size = 1414659, upload-time = "2025-04-04T12:03:17.652Z" }, - { url = "https://files.pythonhosted.org/packages/ef/ea/813af9c42ae21845c1ccfe495bd29c067622a621e85d7cda6bc437de8101/pyzmq-26.4.0-cp310-cp310-win32.whl", hash = "sha256:dea1c8db78fb1b4b7dc9f8e213d0af3fc8ecd2c51a1d5a3ca1cde1bda034a980", size = 580348, upload-time = "2025-04-04T12:03:19.384Z" }, - { url = "https://files.pythonhosted.org/packages/20/68/318666a89a565252c81d3fed7f3b4c54bd80fd55c6095988dfa2cd04a62b/pyzmq-26.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa59e1f5a224b5e04dc6c101d7186058efa68288c2d714aa12d27603ae93318b", size = 643838, upload-time = "2025-04-04T12:03:20.795Z" }, - { url = "https://files.pythonhosted.org/packages/91/f8/fb1a15b5f4ecd3e588bfde40c17d32ed84b735195b5c7d1d7ce88301a16f/pyzmq-26.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:a651fe2f447672f4a815e22e74630b6b1ec3a1ab670c95e5e5e28dcd4e69bbb5", size = 559565, upload-time = "2025-04-04T12:03:22.676Z" }, - { url = "https://files.pythonhosted.org/packages/32/6d/234e3b0aa82fd0290b1896e9992f56bdddf1f97266110be54d0177a9d2d9/pyzmq-26.4.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:bfcf82644c9b45ddd7cd2a041f3ff8dce4a0904429b74d73a439e8cab1bd9e54", size = 1339723, upload-time = "2025-04-04T12:03:24.358Z" }, - { url = "https://files.pythonhosted.org/packages/4f/11/6d561efe29ad83f7149a7cd48e498e539ed09019c6cd7ecc73f4cc725028/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcae3979b2654d5289d3490742378b2f3ce804b0b5fd42036074e2bf35b030", size = 672645, upload-time = "2025-04-04T12:03:25.693Z" }, - { url = "https://files.pythonhosted.org/packages/19/fd/81bfe3e23f418644660bad1a90f0d22f0b3eebe33dd65a79385530bceb3d/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccdff8ac4246b6fb60dcf3982dfaeeff5dd04f36051fe0632748fc0aa0679c01", size = 910133, upload-time = "2025-04-04T12:03:27.625Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/68/321b9c775595ea3df832a9516252b653fe32818db66fdc8fa31c9b9fce37/pyzmq-26.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4550af385b442dc2d55ab7717837812799d3674cb12f9a3aa897611839c18e9e", size = 867428, upload-time = "2025-04-04T12:03:29.004Z" }, - { url = "https://files.pythonhosted.org/packages/4e/6e/159cbf2055ef36aa2aa297e01b24523176e5b48ead283c23a94179fb2ba2/pyzmq-26.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f7ffe9db1187a253fca95191854b3fda24696f086e8789d1d449308a34b88", size = 862409, upload-time = "2025-04-04T12:03:31.032Z" }, - { url = "https://files.pythonhosted.org/packages/05/1c/45fb8db7be5a7d0cadea1070a9cbded5199a2d578de2208197e592f219bd/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3709c9ff7ba61589b7372923fd82b99a81932b592a5c7f1a24147c91da9a68d6", size = 1205007, upload-time = "2025-04-04T12:03:32.687Z" }, - { url = "https://files.pythonhosted.org/packages/f8/fa/658c7f583af6498b463f2fa600f34e298e1b330886f82f1feba0dc2dd6c3/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f8f3c30fb2d26ae5ce36b59768ba60fb72507ea9efc72f8f69fa088450cff1df", size = 1514599, upload-time = "2025-04-04T12:03:34.084Z" }, - { url = "https://files.pythonhosted.org/packages/4d/d7/44d641522353ce0a2bbd150379cb5ec32f7120944e6bfba4846586945658/pyzmq-26.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:382a4a48c8080e273427fc692037e3f7d2851959ffe40864f2db32646eeb3cef", size = 1414546, upload-time = "2025-04-04T12:03:35.478Z" }, - { url = "https://files.pythonhosted.org/packages/72/76/c8ed7263218b3d1e9bce07b9058502024188bd52cc0b0a267a9513b431fc/pyzmq-26.4.0-cp311-cp311-win32.whl", hash = "sha256:d56aad0517d4c09e3b4f15adebba8f6372c5102c27742a5bdbfc74a7dceb8fca", size = 579247, upload-time = "2025-04-04T12:03:36.846Z" }, - { url = "https://files.pythonhosted.org/packages/c3/d0/2d9abfa2571a0b1a67c0ada79a8aa1ba1cce57992d80f771abcdf99bb32c/pyzmq-26.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:963977ac8baed7058c1e126014f3fe58b3773f45c78cce7af5c26c09b6823896", size = 644727, upload-time = "2025-04-04T12:03:38.578Z" }, - { url = "https://files.pythonhosted.org/packages/0d/d1/c8ad82393be6ccedfc3c9f3adb07f8f3976e3c4802640fe3f71441941e70/pyzmq-26.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0c8e8cadc81e44cc5088fcd53b9b3b4ce9344815f6c4a03aec653509296fae3", size = 559942, upload-time = "2025-04-04T12:03:40.143Z" }, - { url = "https://files.pythonhosted.org/packages/10/44/a778555ebfdf6c7fc00816aad12d185d10a74d975800341b1bc36bad1187/pyzmq-26.4.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5227cb8da4b6f68acfd48d20c588197fd67745c278827d5238c707daf579227b", size = 1341586, upload-time = "2025-04-04T12:03:41.954Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4f/f3a58dc69ac757e5103be3bd41fb78721a5e17da7cc617ddb56d973a365c/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1c07a7fa7f7ba86554a2b1bef198c9fed570c08ee062fd2fd6a4dcacd45f905", size = 665880, upload-time = "2025-04-04T12:03:43.45Z" }, - { url = "https://files.pythonhosted.org/packages/fe/45/50230bcfb3ae5cb98bee683b6edeba1919f2565d7cc1851d3c38e2260795/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae775fa83f52f52de73183f7ef5395186f7105d5ed65b1ae65ba27cb1260de2b", size = 902216, upload-time = "2025-04-04T12:03:45.572Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/59/56bbdc5689be5e13727491ad2ba5efd7cd564365750514f9bc8f212eef82/pyzmq-26.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c760d0226ebd52f1e6b644a9e839b5db1e107a23f2fcd46ec0569a4fdd4e63", size = 859814, upload-time = "2025-04-04T12:03:47.188Z" }, - { url = "https://files.pythonhosted.org/packages/81/b1/57db58cfc8af592ce94f40649bd1804369c05b2190e4cbc0a2dad572baeb/pyzmq-26.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ef8c6ecc1d520debc147173eaa3765d53f06cd8dbe7bd377064cdbc53ab456f5", size = 855889, upload-time = "2025-04-04T12:03:49.223Z" }, - { url = "https://files.pythonhosted.org/packages/e8/92/47542e629cbac8f221c230a6d0f38dd3d9cff9f6f589ed45fdf572ffd726/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3150ef4084e163dec29ae667b10d96aad309b668fac6810c9e8c27cf543d6e0b", size = 1197153, upload-time = "2025-04-04T12:03:50.591Z" }, - { url = "https://files.pythonhosted.org/packages/07/e5/b10a979d1d565d54410afc87499b16c96b4a181af46e7645ab4831b1088c/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4448c9e55bf8329fa1dcedd32f661bf611214fa70c8e02fee4347bc589d39a84", size = 1507352, upload-time = "2025-04-04T12:03:52.473Z" }, - { url = "https://files.pythonhosted.org/packages/ab/58/5a23db84507ab9c01c04b1232a7a763be66e992aa2e66498521bbbc72a71/pyzmq-26.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e07dde3647afb084d985310d067a3efa6efad0621ee10826f2cb2f9a31b89d2f", size = 1406834, upload-time = "2025-04-04T12:03:54Z" }, - { url = "https://files.pythonhosted.org/packages/22/74/aaa837b331580c13b79ac39396601fb361454ee184ca85e8861914769b99/pyzmq-26.4.0-cp312-cp312-win32.whl", hash = "sha256:ba034a32ecf9af72adfa5ee383ad0fd4f4e38cdb62b13624278ef768fe5b5b44", size = 577992, upload-time = "2025-04-04T12:03:55.815Z" }, - { url = "https://files.pythonhosted.org/packages/30/0f/55f8c02c182856743b82dde46b2dc3e314edda7f1098c12a8227eeda0833/pyzmq-26.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:056a97aab4064f526ecb32f4343917a4022a5d9efb6b9df990ff72e1879e40be", size = 640466, upload-time = "2025-04-04T12:03:57.231Z" }, - { url = "https://files.pythonhosted.org/packages/e4/29/073779afc3ef6f830b8de95026ef20b2d1ec22d0324d767748d806e57379/pyzmq-26.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:2f23c750e485ce1eb639dbd576d27d168595908aa2d60b149e2d9e34c9df40e0", size = 556342, upload-time = "2025-04-04T12:03:59.218Z" }, - { url = "https://files.pythonhosted.org/packages/d7/20/fb2c92542488db70f833b92893769a569458311a76474bda89dc4264bd18/pyzmq-26.4.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:c43fac689880f5174d6fc864857d1247fe5cfa22b09ed058a344ca92bf5301e3", size = 1339484, upload-time = "2025-04-04T12:04:00.671Z" }, - { url = "https://files.pythonhosted.org/packages/58/29/2f06b9cabda3a6ea2c10f43e67ded3e47fc25c54822e2506dfb8325155d4/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902aca7eba477657c5fb81c808318460328758e8367ecdd1964b6330c73cae43", size = 666106, upload-time = "2025-04-04T12:04:02.366Z" }, - { url = "https://files.pythonhosted.org/packages/77/e4/dcf62bd29e5e190bd21bfccaa4f3386e01bf40d948c239239c2f1e726729/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e48a830bfd152fe17fbdeaf99ac5271aa4122521bf0d275b6b24e52ef35eb6", size = 902056, upload-time = "2025-04-04T12:04:03.919Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/cf/b36b3d7aea236087d20189bec1a87eeb2b66009731d7055e5c65f845cdba/pyzmq-26.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31be2b6de98c824c06f5574331f805707c667dc8f60cb18580b7de078479891e", size = 860148, upload-time = "2025-04-04T12:04:05.581Z" }, - { url = "https://files.pythonhosted.org/packages/18/a6/f048826bc87528c208e90604c3bf573801e54bd91e390cbd2dfa860e82dc/pyzmq-26.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6332452034be001bbf3206ac59c0d2a7713de5f25bb38b06519fc6967b7cf771", size = 855983, upload-time = "2025-04-04T12:04:07.096Z" }, - { url = "https://files.pythonhosted.org/packages/0a/27/454d34ab6a1d9772a36add22f17f6b85baf7c16e14325fa29e7202ca8ee8/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:da8c0f5dd352136853e6a09b1b986ee5278dfddfebd30515e16eae425c872b30", size = 1197274, upload-time = "2025-04-04T12:04:08.523Z" }, - { url = "https://files.pythonhosted.org/packages/f4/3d/7abfeab6b83ad38aa34cbd57c6fc29752c391e3954fd12848bd8d2ec0df6/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f4ccc1a0a2c9806dda2a2dd118a3b7b681e448f3bb354056cad44a65169f6d86", size = 1507120, upload-time = "2025-04-04T12:04:10.58Z" }, - { url = "https://files.pythonhosted.org/packages/13/ff/bc8d21dbb9bc8705126e875438a1969c4f77e03fc8565d6901c7933a3d01/pyzmq-26.4.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c0b5fceadbab461578daf8d1dcc918ebe7ddd2952f748cf30c7cf2de5d51101", size = 1406738, upload-time = "2025-04-04T12:04:12.509Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5d/d4cd85b24de71d84d81229e3bbb13392b2698432cf8fdcea5afda253d587/pyzmq-26.4.0-cp313-cp313-win32.whl", hash = "sha256:28e2b0ff5ba4b3dd11062d905682bad33385cfa3cc03e81abd7f0822263e6637", size = 577826, upload-time = "2025-04-04T12:04:14.289Z" }, - { url = "https://files.pythonhosted.org/packages/c6/6c/f289c1789d7bb6e5a3b3bef7b2a55089b8561d17132be7d960d3ff33b14e/pyzmq-26.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:23ecc9d241004c10e8b4f49d12ac064cd7000e1643343944a10df98e57bc544b", size = 640406, upload-time = "2025-04-04T12:04:15.757Z" }, - { url = "https://files.pythonhosted.org/packages/b3/99/676b8851cb955eb5236a0c1e9ec679ea5ede092bf8bf2c8a68d7e965cac3/pyzmq-26.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:1edb0385c7f025045d6e0f759d4d3afe43c17a3d898914ec6582e6f464203c08", size = 556216, upload-time = "2025-04-04T12:04:17.212Z" }, - { url = "https://files.pythonhosted.org/packages/65/c2/1fac340de9d7df71efc59d9c50fc7a635a77b103392d1842898dd023afcb/pyzmq-26.4.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:93a29e882b2ba1db86ba5dd5e88e18e0ac6b627026c5cfbec9983422011b82d4", size = 1333769, upload-time = "2025-04-04T12:04:18.665Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c7/6c03637e8d742c3b00bec4f5e4cd9d1c01b2f3694c6f140742e93ca637ed/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45684f276f57110bb89e4300c00f1233ca631f08f5f42528a5c408a79efc4a", size = 658826, upload-time = "2025-04-04T12:04:20.405Z" }, - { url = "https://files.pythonhosted.org/packages/a5/97/a8dca65913c0f78e0545af2bb5078aebfc142ca7d91cdaffa1fbc73e5dbd/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72073e75260cb301aad4258ad6150fa7f57c719b3f498cb91e31df16784d89b", size = 891650, upload-time = "2025-04-04T12:04:22.413Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/7e/f63af1031eb060bf02d033732b910fe48548dcfdbe9c785e9f74a6cc6ae4/pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be37e24b13026cfedd233bcbbccd8c0bcd2fdd186216094d095f60076201538d", size = 849776, upload-time = "2025-04-04T12:04:23.959Z" }, - { url = "https://files.pythonhosted.org/packages/f6/fa/1a009ce582802a895c0d5fe9413f029c940a0a8ee828657a3bb0acffd88b/pyzmq-26.4.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:237b283044934d26f1eeff4075f751b05d2f3ed42a257fc44386d00df6a270cf", size = 842516, upload-time = "2025-04-04T12:04:25.449Z" }, - { url = "https://files.pythonhosted.org/packages/6e/bc/f88b0bad0f7a7f500547d71e99f10336f2314e525d4ebf576a1ea4a1d903/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b30f862f6768b17040929a68432c8a8be77780317f45a353cb17e423127d250c", size = 1189183, upload-time = "2025-04-04T12:04:27.035Z" }, - { url = "https://files.pythonhosted.org/packages/d9/8c/db446a3dd9cf894406dec2e61eeffaa3c07c3abb783deaebb9812c4af6a5/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:c80fcd3504232f13617c6ab501124d373e4895424e65de8b72042333316f64a8", size = 1495501, upload-time = "2025-04-04T12:04:28.833Z" }, - { url = "https://files.pythonhosted.org/packages/05/4c/bf3cad0d64c3214ac881299c4562b815f05d503bccc513e3fd4fdc6f67e4/pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:26a2a7451606b87f67cdeca2c2789d86f605da08b4bd616b1a9981605ca3a364", size = 1395540, upload-time = "2025-04-04T12:04:30.562Z" }, - { url = "https://files.pythonhosted.org/packages/06/91/21d3af57bc77e86e9d1e5384f256fd25cdb4c8eed4c45c8119da8120915f/pyzmq-26.4.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a88643de8abd000ce99ca72056a1a2ae15881ee365ecb24dd1d9111e43d57842", size = 1340634, upload-time = "2025-04-04T12:04:47.661Z" }, - { url = "https://files.pythonhosted.org/packages/54/e6/58cd825023e998a0e49db7322b3211e6cf93f0796710b77d1496304c10d1/pyzmq-26.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a744ce209ecb557406fb928f3c8c55ce79b16c3eeb682da38ef5059a9af0848", size = 907880, upload-time = "2025-04-04T12:04:49.294Z" }, - { url = "https://files.pythonhosted.org/packages/72/83/619e44a766ef738cb7e8ed8e5a54565627801bdb027ca6dfb70762385617/pyzmq-26.4.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9434540f333332224ecb02ee6278b6c6f11ea1266b48526e73c903119b2f420f", size = 863003, upload-time = "2025-04-04T12:04:51Z" }, - { url = "https://files.pythonhosted.org/packages/b6/6a/a59af31320598bdc63d2c5a3181d14a89673c2c794540678285482e8a342/pyzmq-26.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c6f0a23e55cd38d27d4c89add963294ea091ebcb104d7fdab0f093bc5abb1c", size = 673432, upload-time = "2025-04-04T12:04:52.611Z" }, - { url = "https://files.pythonhosted.org/packages/29/ae/64dd6c18b08ce2cb009c60f11cf01c87f323acd80344d8b059c0304a7370/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6145df55dc2309f6ef72d70576dcd5aabb0fd373311613fe85a5e547c722b780", size = 1205221, upload-time = "2025-04-04T12:04:54.31Z" }, - { url = "https://files.pythonhosted.org/packages/d0/0b/c583ab750957b025244a66948831bc9ca486d11c820da4626caf6480ee1a/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2ea81823840ef8c56e5d2f9918e4d571236294fea4d1842b302aebffb9e40997", size = 1515299, upload-time = "2025-04-04T12:04:56.063Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/ba/95ba76292c49dd9c6dff1f127b4867033020b708d101cba6e4fc5a3d166d/pyzmq-26.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc2abc385dc37835445abe206524fbc0c9e3fce87631dfaa90918a1ba8f425eb", size = 1415366, upload-time = "2025-04-04T12:04:58.241Z" }, - { url = "https://files.pythonhosted.org/packages/6e/65/51abe36169effda26ac7400ffac96f463e09dff40d344cdc2629d9a59162/pyzmq-26.4.0-cp39-cp39-win32.whl", hash = "sha256:41a2508fe7bed4c76b4cf55aacfb8733926f59d440d9ae2b81ee8220633b4d12", size = 580773, upload-time = "2025-04-04T12:04:59.786Z" }, - { url = "https://files.pythonhosted.org/packages/89/68/d9ac94086c63a0ed8d73e9e8aec54b39f481696698a5a939a7207629fb30/pyzmq-26.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4000e8255d6cbce38982e5622ebb90823f3409b7ffe8aeae4337ef7d6d2612a", size = 644340, upload-time = "2025-04-04T12:05:01.389Z" }, - { url = "https://files.pythonhosted.org/packages/dc/8f/66c261d657c1b0791ee5b372c90b1646b453adb581fcdc1dc5c94e5b03e3/pyzmq-26.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f6919d9c120488246bdc2a2f96662fa80d67b35bd6d66218f457e722b3ff64", size = 560075, upload-time = "2025-04-04T12:05:02.975Z" }, - { url = "https://files.pythonhosted.org/packages/47/03/96004704a84095f493be8d2b476641f5c967b269390173f85488a53c1c13/pyzmq-26.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:98d948288ce893a2edc5ec3c438fe8de2daa5bbbd6e2e865ec5f966e237084ba", size = 834408, upload-time = "2025-04-04T12:05:04.569Z" }, - { url = "https://files.pythonhosted.org/packages/e4/7f/68d8f3034a20505db7551cb2260248be28ca66d537a1ac9a257913d778e4/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9f34f5c9e0203ece706a1003f1492a56c06c0632d86cb77bcfe77b56aacf27b", size = 569580, upload-time = "2025-04-04T12:05:06.283Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a6/2b0d6801ec33f2b2a19dd8d02e0a1e8701000fec72926e6787363567d30c/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80c9b48aef586ff8b698359ce22f9508937c799cc1d2c9c2f7c95996f2300c94", size = 798250, upload-time = "2025-04-04T12:05:07.88Z" }, - { url = "https://files.pythonhosted.org/packages/96/2a/0322b3437de977dcac8a755d6d7ce6ec5238de78e2e2d9353730b297cf12/pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f2a5b74009fd50b53b26f65daff23e9853e79aa86e0aa08a53a7628d92d44a", size = 756758, upload-time = "2025-04-04T12:05:09.483Z" }, - { url = "https://files.pythonhosted.org/packages/c2/33/43704f066369416d65549ccee366cc19153911bec0154da7c6b41fca7e78/pyzmq-26.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:61c5f93d7622d84cb3092d7f6398ffc77654c346545313a3737e266fc11a3beb", size = 555371, upload-time = "2025-04-04T12:05:11.062Z" }, - { url = "https://files.pythonhosted.org/packages/04/52/a70fcd5592715702248306d8e1729c10742c2eac44529984413b05c68658/pyzmq-26.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4478b14cb54a805088299c25a79f27eaf530564a7a4f72bf432a040042b554eb", size = 834405, upload-time = "2025-04-04T12:05:13.3Z" }, - { url = "https://files.pythonhosted.org/packages/25/f9/1a03f1accff16b3af1a6fa22cbf7ced074776abbf688b2e9cb4629700c62/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a28ac29c60e4ba84b5f58605ace8ad495414a724fe7aceb7cf06cd0598d04e1", size = 569578, upload-time = "2025-04-04T12:05:15.36Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/0c/3a633acd762aa6655fcb71fa841907eae0ab1e8582ff494b137266de341d/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b03c1ceea27c6520124f4fb2ba9c647409b9abdf9a62388117148a90419494", size = 798248, upload-time = "2025-04-04T12:05:17.376Z" }, - { url = "https://files.pythonhosted.org/packages/cd/cc/6c99c84aa60ac1cc56747bed6be8ce6305b9b861d7475772e7a25ce019d3/pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7731abd23a782851426d4e37deb2057bf9410848a4459b5ede4fe89342e687a9", size = 756757, upload-time = "2025-04-04T12:05:19.19Z" }, - { url = "https://files.pythonhosted.org/packages/13/9c/d8073bd898eb896e94c679abe82e47506e2b750eb261cf6010ced869797c/pyzmq-26.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a222ad02fbe80166b0526c038776e8042cd4e5f0dec1489a006a1df47e9040e0", size = 555371, upload-time = "2025-04-04T12:05:20.702Z" }, - { url = "https://files.pythonhosted.org/packages/af/b2/71a644b629e1a93ccae9e22a45aec9d23065dfcc24c399cb837f81cd08c2/pyzmq-26.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:552b0d2e39987733e1e9e948a0ced6ff75e0ea39ab1a1db2fc36eb60fd8760db", size = 834397, upload-time = "2025-04-04T12:05:31.217Z" }, - { url = "https://files.pythonhosted.org/packages/a9/dd/052a25651eaaff8f5fd652fb40a3abb400e71207db2d605cf6faf0eac598/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd670a8aa843f2ee637039bbd412e0d7294a5e588e1ecc9ad98b0cdc050259a4", size = 569571, upload-time = "2025-04-04T12:05:32.877Z" }, - { url = "https://files.pythonhosted.org/packages/a5/5d/201ca10b5d12ab187a418352c06d70c3e2087310af038b11056aba1359be/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d367b7b775a0e1e54a59a2ba3ed4d5e0a31566af97cc9154e34262777dab95ed", size = 798243, upload-time = "2025-04-04T12:05:34.91Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d4/2c64e54749536ad1633400f28d71e71e19375d00ce1fe9bb1123364dc927/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112af16c406e4a93df2caef49f884f4c2bb2b558b0b5577ef0b2465d15c1abc", size = 756751, upload-time = "2025-04-04T12:05:37.12Z" }, - { url = "https://files.pythonhosted.org/packages/08/e6/34d119af43d06a8dcd88bf7a62dac69597eaba52b49ecce76ff06b40f1fd/pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76c298683f82669cab0b6da59071f55238c039738297c69f187a542c6d40099", size = 745400, upload-time = "2025-04-04T12:05:40.694Z" }, - { url = "https://files.pythonhosted.org/packages/f8/49/b5e471d74a63318e51f30d329b17d2550bdededaab55baed2e2499de7ce4/pyzmq-26.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:49b6ca2e625b46f499fb081aaf7819a177f41eeb555acb05758aa97f4f95d147", size = 555367, upload-time = "2025-04-04T12:05:42.356Z" }, -] - -[[package]] -name = "referencing" -version = "0.36.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, -] - -[[package]] -name = "requests" -version = "2.32.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, -] - -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" }, -] - -[[package]] -name = "rfc3986-validator" -version = "0.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760, upload-time = "2019-10-28T16:00:19.144Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" }, -] - -[[package]] -name = "rpds-py" -version = "0.25.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/a6/60184b7fc00dd3ca80ac635dd5b8577d444c57e8e8742cecabfacb829921/rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3", size = 27304, upload-time = "2025-05-21T12:46:12.502Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/09/e1158988e50905b7f8306487a576b52d32aa9a87f79f7ab24ee8db8b6c05/rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9", size = 373140, upload-time = "2025-05-21T12:42:38.834Z" }, - { url = "https://files.pythonhosted.org/packages/e0/4b/a284321fb3c45c02fc74187171504702b2934bfe16abab89713eedfe672e/rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40", size = 358860, upload-time = "2025-05-21T12:42:41.394Z" }, - { url = "https://files.pythonhosted.org/packages/4e/46/8ac9811150c75edeae9fc6fa0e70376c19bc80f8e1f7716981433905912b/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f", size = 386179, upload-time = "2025-05-21T12:42:43.213Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ec/87eb42d83e859bce91dcf763eb9f2ab117142a49c9c3d17285440edb5b69/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b", size = 400282, upload-time = "2025-05-21T12:42:44.92Z" }, - { url = "https://files.pythonhosted.org/packages/68/c8/2a38e0707d7919c8c78e1d582ab15cf1255b380bcb086ca265b73ed6db23/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa", size = 521824, upload-time = "2025-05-21T12:42:46.856Z" }, - { url = "https://files.pythonhosted.org/packages/5e/2c/6a92790243569784dde84d144bfd12bd45102f4a1c897d76375076d730ab/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e", size = 411644, upload-time = "2025-05-21T12:42:48.838Z" }, - { url = "https://files.pythonhosted.org/packages/eb/76/66b523ffc84cf47db56efe13ae7cf368dee2bacdec9d89b9baca5e2e6301/rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da", size = 386955, upload-time = "2025-05-21T12:42:50.835Z" }, - { url = "https://files.pythonhosted.org/packages/b6/b9/a362d7522feaa24dc2b79847c6175daa1c642817f4a19dcd5c91d3e2c316/rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380", size = 421039, upload-time = "2025-05-21T12:42:52.348Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c4/b5b6f70b4d719b6584716889fd3413102acf9729540ee76708d56a76fa97/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9", size = 563290, upload-time = "2025-05-21T12:42:54.404Z" }, - { url = "https://files.pythonhosted.org/packages/87/a3/2e6e816615c12a8f8662c9d8583a12eb54c52557521ef218cbe3095a8afa/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54", size = 592089, upload-time = "2025-05-21T12:42:55.976Z" }, - { url = "https://files.pythonhosted.org/packages/c0/08/9b8e1050e36ce266135994e2c7ec06e1841f1c64da739daeb8afe9cb77a4/rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2", size = 558400, upload-time = "2025-05-21T12:42:58.032Z" }, - { url = "https://files.pythonhosted.org/packages/f2/df/b40b8215560b8584baccd839ff5c1056f3c57120d79ac41bd26df196da7e/rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24", size = 219741, upload-time = "2025-05-21T12:42:59.479Z" }, - { url = "https://files.pythonhosted.org/packages/10/99/e4c58be18cf5d8b40b8acb4122bc895486230b08f978831b16a3916bd24d/rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a", size = 231553, upload-time = "2025-05-21T12:43:01.425Z" }, - { url = "https://files.pythonhosted.org/packages/95/e1/df13fe3ddbbea43567e07437f097863b20c99318ae1f58a0fe389f763738/rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d", size = 373341, upload-time = "2025-05-21T12:43:02.978Z" }, - { url = "https://files.pythonhosted.org/packages/7a/58/deef4d30fcbcbfef3b6d82d17c64490d5c94585a2310544ce8e2d3024f83/rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255", size = 359111, upload-time = "2025-05-21T12:43:05.128Z" }, - { url = "https://files.pythonhosted.org/packages/bb/7e/39f1f4431b03e96ebaf159e29a0f82a77259d8f38b2dd474721eb3a8ac9b/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2", size = 386112, upload-time = "2025-05-21T12:43:07.13Z" }, - { url = "https://files.pythonhosted.org/packages/db/e7/847068a48d63aec2ae695a1646089620b3b03f8ccf9f02c122ebaf778f3c/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0", size = 400362, upload-time = "2025-05-21T12:43:08.693Z" }, - { url = "https://files.pythonhosted.org/packages/3b/3d/9441d5db4343d0cee759a7ab4d67420a476cebb032081763de934719727b/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f", size = 522214, upload-time = "2025-05-21T12:43:10.694Z" }, - { url = "https://files.pythonhosted.org/packages/a2/ec/2cc5b30d95f9f1a432c79c7a2f65d85e52812a8f6cbf8768724571710786/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7", size = 411491, upload-time = "2025-05-21T12:43:12.739Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6c/44695c1f035077a017dd472b6a3253553780837af2fac9b6ac25f6a5cb4d/rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd", size = 386978, upload-time = "2025-05-21T12:43:14.25Z" }, - { url = "https://files.pythonhosted.org/packages/b1/74/b4357090bb1096db5392157b4e7ed8bb2417dc7799200fcbaee633a032c9/rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65", size = 420662, upload-time = "2025-05-21T12:43:15.8Z" }, - { url = "https://files.pythonhosted.org/packages/26/dd/8cadbebf47b96e59dfe8b35868e5c38a42272699324e95ed522da09d3a40/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f", size = 563385, upload-time = "2025-05-21T12:43:17.78Z" }, - { url = "https://files.pythonhosted.org/packages/c3/ea/92960bb7f0e7a57a5ab233662f12152085c7dc0d5468534c65991a3d48c9/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d", size = 592047, upload-time = "2025-05-21T12:43:19.457Z" }, - { url = "https://files.pythonhosted.org/packages/61/ad/71aabc93df0d05dabcb4b0c749277881f8e74548582d96aa1bf24379493a/rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", 
hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042", size = 557863, upload-time = "2025-05-21T12:43:21.69Z" }, - { url = "https://files.pythonhosted.org/packages/93/0f/89df0067c41f122b90b76f3660028a466eb287cbe38efec3ea70e637ca78/rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc", size = 219627, upload-time = "2025-05-21T12:43:23.311Z" }, - { url = "https://files.pythonhosted.org/packages/7c/8d/93b1a4c1baa903d0229374d9e7aa3466d751f1d65e268c52e6039c6e338e/rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4", size = 231603, upload-time = "2025-05-21T12:43:25.145Z" }, - { url = "https://files.pythonhosted.org/packages/cb/11/392605e5247bead2f23e6888e77229fbd714ac241ebbebb39a1e822c8815/rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4", size = 223967, upload-time = "2025-05-21T12:43:26.566Z" }, - { url = "https://files.pythonhosted.org/packages/7f/81/28ab0408391b1dc57393653b6a0cf2014cc282cc2909e4615e63e58262be/rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c", size = 364647, upload-time = "2025-05-21T12:43:28.559Z" }, - { url = "https://files.pythonhosted.org/packages/2c/9a/7797f04cad0d5e56310e1238434f71fc6939d0bc517192a18bb99a72a95f/rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b", size = 350454, upload-time = "2025-05-21T12:43:30.615Z" }, - { url = "https://files.pythonhosted.org/packages/69/3c/93d2ef941b04898011e5d6eaa56a1acf46a3b4c9f4b3ad1bbcbafa0bee1f/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa", size = 389665, upload-time = "2025-05-21T12:43:32.629Z" }, - { url = "https://files.pythonhosted.org/packages/c1/57/ad0e31e928751dde8903a11102559628d24173428a0f85e25e187defb2c1/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda", size = 403873, upload-time = "2025-05-21T12:43:34.576Z" }, - { url = "https://files.pythonhosted.org/packages/16/ad/c0c652fa9bba778b4f54980a02962748479dc09632e1fd34e5282cf2556c/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309", size = 525866, upload-time = "2025-05-21T12:43:36.123Z" }, - { url = "https://files.pythonhosted.org/packages/2a/39/3e1839bc527e6fcf48d5fec4770070f872cdee6c6fbc9b259932f4e88a38/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b", size = 416886, upload-time = "2025-05-21T12:43:38.034Z" }, - { url = "https://files.pythonhosted.org/packages/7a/95/dd6b91cd4560da41df9d7030a038298a67d24f8ca38e150562644c829c48/rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea", size = 390666, upload-time = "2025-05-21T12:43:40.065Z" }, - { url = "https://files.pythonhosted.org/packages/64/48/1be88a820e7494ce0a15c2d390ccb7c52212370badabf128e6a7bb4cb802/rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65", size = 425109, upload-time = "2025-05-21T12:43:42.263Z" }, - { url = "https://files.pythonhosted.org/packages/cf/07/3e2a17927ef6d7720b9949ec1b37d1e963b829ad0387f7af18d923d5cfa5/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c", size = 567244, upload-time = "2025-05-21T12:43:43.846Z" }, - { url = "https://files.pythonhosted.org/packages/d2/e5/76cf010998deccc4f95305d827847e2eae9c568099c06b405cf96384762b/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd", size = 596023, upload-time = "2025-05-21T12:43:45.932Z" }, - { url = "https://files.pythonhosted.org/packages/52/9a/df55efd84403736ba37a5a6377b70aad0fd1cb469a9109ee8a1e21299a1c/rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb", size = 561634, upload-time = "2025-05-21T12:43:48.263Z" }, - { url = "https://files.pythonhosted.org/packages/ab/aa/dc3620dd8db84454aaf9374bd318f1aa02578bba5e567f5bf6b79492aca4/rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe", size = 222713, upload-time = "2025-05-21T12:43:49.897Z" }, - { url = "https://files.pythonhosted.org/packages/a3/7f/7cef485269a50ed5b4e9bae145f512d2a111ca638ae70cc101f661b4defd/rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192", size = 235280, upload-time = "2025-05-21T12:43:51.893Z" }, - { url = "https://files.pythonhosted.org/packages/99/f2/c2d64f6564f32af913bf5f3f7ae41c7c263c5ae4c4e8f1a17af8af66cd46/rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728", size = 225399, upload-time = "2025-05-21T12:43:53.351Z" }, - { url = "https://files.pythonhosted.org/packages/2b/da/323848a2b62abe6a0fec16ebe199dc6889c5d0a332458da8985b2980dffe/rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559", size = 364498, upload-time = "2025-05-21T12:43:54.841Z" }, - { url = "https://files.pythonhosted.org/packages/1f/b4/4d3820f731c80fd0cd823b3e95b9963fec681ae45ba35b5281a42382c67d/rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1", size = 350083, upload-time = "2025-05-21T12:43:56.428Z" }, - { url = "https://files.pythonhosted.org/packages/d5/b1/3a8ee1c9d480e8493619a437dec685d005f706b69253286f50f498cbdbcf/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c", size = 389023, upload-time = "2025-05-21T12:43:57.995Z" }, - { url = "https://files.pythonhosted.org/packages/3b/31/17293edcfc934dc62c3bf74a0cb449ecd549531f956b72287203e6880b87/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb", size = 403283, upload-time = "2025-05-21T12:43:59.546Z" }, - { url = "https://files.pythonhosted.org/packages/d1/ca/e0f0bc1a75a8925024f343258c8ecbd8828f8997ea2ac71e02f67b6f5299/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40", size = 
524634, upload-time = "2025-05-21T12:44:01.087Z" }, - { url = "https://files.pythonhosted.org/packages/3e/03/5d0be919037178fff33a6672ffc0afa04ea1cfcb61afd4119d1b5280ff0f/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79", size = 416233, upload-time = "2025-05-21T12:44:02.604Z" }, - { url = "https://files.pythonhosted.org/packages/05/7c/8abb70f9017a231c6c961a8941403ed6557664c0913e1bf413cbdc039e75/rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325", size = 390375, upload-time = "2025-05-21T12:44:04.162Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ac/a87f339f0e066b9535074a9f403b9313fd3892d4a164d5d5f5875ac9f29f/rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295", size = 424537, upload-time = "2025-05-21T12:44:06.175Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8f/8d5c1567eaf8c8afe98a838dd24de5013ce6e8f53a01bd47fe8bb06b5533/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b", size = 566425, upload-time = "2025-05-21T12:44:08.242Z" }, - { url = "https://files.pythonhosted.org/packages/95/33/03016a6be5663b389c8ab0bbbcca68d9e96af14faeff0a04affcb587e776/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98", size = 595197, upload-time = "2025-05-21T12:44:10.449Z" }, - { url = "https://files.pythonhosted.org/packages/33/8d/da9f4d3e208c82fda311bff0cf0a19579afceb77cf456e46c559a1c075ba/rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd", size = 561244, upload-time = "2025-05-21T12:44:12.387Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b3/39d5dcf7c5f742ecd6dbc88f6f84ae54184b92f5f387a4053be2107b17f1/rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31", size = 222254, upload-time = "2025-05-21T12:44:14.261Z" }, - { url = "https://files.pythonhosted.org/packages/5f/19/2d6772c8eeb8302c5f834e6d0dfd83935a884e7c5ce16340c7eaf89ce925/rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500", size = 234741, upload-time = "2025-05-21T12:44:16.236Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/145ada26cfaf86018d0eb304fe55eafdd4f0b6b84530246bb4a7c4fb5c4b/rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5", size = 224830, upload-time = "2025-05-21T12:44:17.749Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ca/d435844829c384fd2c22754ff65889c5c556a675d2ed9eb0e148435c6690/rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129", size = 359668, upload-time = "2025-05-21T12:44:19.322Z" }, - { url = "https://files.pythonhosted.org/packages/1f/01/b056f21db3a09f89410d493d2f6614d87bb162499f98b649d1dbd2a81988/rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d", size = 345649, upload-time = "2025-05-21T12:44:20.962Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/0f/e0d00dc991e3d40e03ca36383b44995126c36b3eafa0ccbbd19664709c88/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72", size = 384776, upload-time = "2025-05-21T12:44:22.516Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a2/59374837f105f2ca79bde3c3cd1065b2f8c01678900924949f6392eab66d/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34", size = 395131, upload-time = "2025-05-21T12:44:24.147Z" }, - { url = "https://files.pythonhosted.org/packages/9c/dc/48e8d84887627a0fe0bac53f0b4631e90976fd5d35fff8be66b8e4f3916b/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9", size = 520942, upload-time = "2025-05-21T12:44:25.915Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f5/ee056966aeae401913d37befeeab57a4a43a4f00099e0a20297f17b8f00c/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5", size = 411330, upload-time = "2025-05-21T12:44:27.638Z" }, - { url = "https://files.pythonhosted.org/packages/ab/74/b2cffb46a097cefe5d17f94ede7a174184b9d158a0aeb195f39f2c0361e8/rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194", size = 387339, upload-time = "2025-05-21T12:44:29.292Z" }, - { url = "https://files.pythonhosted.org/packages/7f/9a/0ff0b375dcb5161c2b7054e7d0b7575f1680127505945f5cabaac890bc07/rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6", size = 418077, upload-time = "2025-05-21T12:44:30.877Z" }, - { url = "https://files.pythonhosted.org/packages/0d/a1/fda629bf20d6b698ae84c7c840cfb0e9e4200f664fc96e1f456f00e4ad6e/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78", size = 562441, upload-time = "2025-05-21T12:44:32.541Z" }, - { url = "https://files.pythonhosted.org/packages/20/15/ce4b5257f654132f326f4acd87268e1006cc071e2c59794c5bdf4bebbb51/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72", size = 590750, upload-time = "2025-05-21T12:44:34.557Z" }, - { url = "https://files.pythonhosted.org/packages/fb/ab/e04bf58a8d375aeedb5268edcc835c6a660ebf79d4384d8e0889439448b0/rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66", size = 558891, upload-time = "2025-05-21T12:44:37.358Z" }, - { url = "https://files.pythonhosted.org/packages/90/82/cb8c6028a6ef6cd2b7991e2e4ced01c854b6236ecf51e81b64b569c43d73/rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523", size = 218718, upload-time = "2025-05-21T12:44:38.969Z" }, - { url = "https://files.pythonhosted.org/packages/b6/97/5a4b59697111c89477d20ba8a44df9ca16b41e737fa569d5ae8bff99e650/rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763", size = 232218, upload-time = "2025-05-21T12:44:40.512Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/74/716d42058ef501e2c08f27aa3ff455f6fc1bbbd19a6ab8dea07e6322d217/rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd", size = 373475, upload-time = "2025-05-21T12:44:42.136Z" }, - { url = "https://files.pythonhosted.org/packages/e1/21/3faa9c523e2496a2505d7440b6f24c9166f37cb7ac027cac6cfbda9b4b5f/rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634", size = 359349, upload-time = "2025-05-21T12:44:43.813Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/c747fe568d21b1d679079b52b926ebc4d1497457510a1773dc5fd4b7b4e2/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be", size = 386526, upload-time = "2025-05-21T12:44:45.452Z" }, - { url = "https://files.pythonhosted.org/packages/0b/cc/4a41703de4fb291f13660fa3d882cbd39db5d60497c6e7fa7f5142e5e69f/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0", size = 400526, upload-time = "2025-05-21T12:44:47.011Z" }, - { url = "https://files.pythonhosted.org/packages/f1/78/60c980bedcad8418b614f0b4d6d420ecf11225b579cec0cb4e84d168b4da/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908", size = 525726, upload-time = "2025-05-21T12:44:48.838Z" }, - { url = "https://files.pythonhosted.org/packages/3f/37/f2f36b7f1314b3c3200d663decf2f8e29480492a39ab22447112aead4693/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a", size = 412045, upload-time = "2025-05-21T12:44:50.433Z" }, - { url = "https://files.pythonhosted.org/packages/df/96/e03783e87a775b1242477ccbc35895f8e9b2bbdb60e199034a6da03c2687/rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9", size = 386953, upload-time = "2025-05-21T12:44:52.092Z" }, - { url = "https://files.pythonhosted.org/packages/7c/7d/1418f4b69bfb4b40481a3d84782113ad7d4cca0b38ae70b982dd5b20102a/rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80", size = 421144, upload-time = "2025-05-21T12:44:53.734Z" }, - { url = "https://files.pythonhosted.org/packages/b3/0e/61469912c6493ee3808012e60f4930344b974fcb6b35c4348e70b6be7bc7/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a", size = 563730, upload-time = "2025-05-21T12:44:55.846Z" }, - { url = "https://files.pythonhosted.org/packages/f6/86/6d0a5cc56481ac61977b7c839677ed5c63d38cf0fcb3e2280843a8a6f476/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451", size = 592321, upload-time = "2025-05-21T12:44:57.514Z" }, - { url = "https://files.pythonhosted.org/packages/5d/87/d1e2453fe336f71e6aa296452a8c85c2118b587b1d25ce98014f75838a60/rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f", size = 558162, upload-time = "2025-05-21T12:44:59.564Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/92/349f04b1644c5cef3e2e6c53b7168a28531945f9e6fca7425f6d20ddbc3c/rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449", size = 219920, upload-time = "2025-05-21T12:45:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/f2/84/3969bef883a3f37ff2213795257cb7b7e93a115829670befb8de0e003031/rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890", size = 231452, upload-time = "2025-05-21T12:45:02.85Z" }, - { url = "https://files.pythonhosted.org/packages/78/ff/566ce53529b12b4f10c0a348d316bd766970b7060b4fd50f888be3b3b281/rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28", size = 373931, upload-time = "2025-05-21T12:45:05.01Z" }, - { url = "https://files.pythonhosted.org/packages/83/5d/deba18503f7c7878e26aa696e97f051175788e19d5336b3b0e76d3ef9256/rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f", size = 359074, upload-time = "2025-05-21T12:45:06.714Z" }, - { url = "https://files.pythonhosted.org/packages/0d/74/313415c5627644eb114df49c56a27edba4d40cfd7c92bd90212b3604ca84/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13", size = 387255, upload-time = "2025-05-21T12:45:08.669Z" }, - { url = "https://files.pythonhosted.org/packages/8c/c8/c723298ed6338963d94e05c0f12793acc9b91d04ed7c4ba7508e534b7385/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d", size = 400714, upload-time = "2025-05-21T12:45:10.39Z" }, - { url = "https://files.pythonhosted.org/packages/33/8a/51f1f6aa653c2e110ed482ef2ae94140d56c910378752a1b483af11019ee/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000", size = 523105, upload-time = "2025-05-21T12:45:12.273Z" }, - { url = "https://files.pythonhosted.org/packages/c7/a4/7873d15c088ad3bff36910b29ceb0f178e4b3232c2adbe9198de68a41e63/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540", size = 411499, upload-time = "2025-05-21T12:45:13.95Z" }, - { url = "https://files.pythonhosted.org/packages/90/f3/0ce1437befe1410766d11d08239333ac1b2d940f8a64234ce48a7714669c/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b", size = 387918, upload-time = "2025-05-21T12:45:15.649Z" }, - { url = "https://files.pythonhosted.org/packages/94/d4/5551247988b2a3566afb8a9dba3f1d4a3eea47793fd83000276c1a6c726e/rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e", size = 421705, upload-time = "2025-05-21T12:45:17.788Z" }, - { url = "https://files.pythonhosted.org/packages/b0/25/5960f28f847bf736cc7ee3c545a7e1d2f3b5edaf82c96fb616c2f5ed52d0/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8", size = 564489, upload-time = 
"2025-05-21T12:45:19.466Z" }, - { url = "https://files.pythonhosted.org/packages/02/66/1c99884a0d44e8c2904d3c4ec302f995292d5dde892c3bf7685ac1930146/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8", size = 592557, upload-time = "2025-05-21T12:45:21.362Z" }, - { url = "https://files.pythonhosted.org/packages/55/ae/4aeac84ebeffeac14abb05b3bb1d2f728d00adb55d3fb7b51c9fa772e760/rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11", size = 558691, upload-time = "2025-05-21T12:45:23.084Z" }, - { url = "https://files.pythonhosted.org/packages/41/b3/728a08ff6f5e06fe3bb9af2e770e9d5fd20141af45cff8dfc62da4b2d0b3/rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a", size = 231651, upload-time = "2025-05-21T12:45:24.72Z" }, - { url = "https://files.pythonhosted.org/packages/49/74/48f3df0715a585cbf5d34919c9c757a4c92c1a9eba059f2d334e72471f70/rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954", size = 374208, upload-time = "2025-05-21T12:45:26.306Z" }, - { url = "https://files.pythonhosted.org/packages/55/b0/9b01bb11ce01ec03d05e627249cc2c06039d6aa24ea5a22a39c312167c10/rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba", size = 359262, upload-time = "2025-05-21T12:45:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/a9/eb/5395621618f723ebd5116c53282052943a726dba111b49cd2071f785b665/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b", size = 387366, upload-time = "2025-05-21T12:45:30.42Z" }, - { url = "https://files.pythonhosted.org/packages/68/73/3d51442bdb246db619d75039a50ea1cf8b5b4ee250c3e5cd5c3af5981cd4/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038", size = 400759, upload-time = "2025-05-21T12:45:32.516Z" }, - { url = "https://files.pythonhosted.org/packages/b7/4c/3a32d5955d7e6cb117314597bc0f2224efc798428318b13073efe306512a/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9", size = 523128, upload-time = "2025-05-21T12:45:34.396Z" }, - { url = "https://files.pythonhosted.org/packages/be/95/1ffccd3b0bb901ae60b1dd4b1be2ab98bb4eb834cd9b15199888f5702f7b/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1", size = 411597, upload-time = "2025-05-21T12:45:36.164Z" }, - { url = "https://files.pythonhosted.org/packages/ef/6d/6e6cd310180689db8b0d2de7f7d1eabf3fb013f239e156ae0d5a1a85c27f/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762", size = 388053, upload-time = "2025-05-21T12:45:38.45Z" }, - { url = "https://files.pythonhosted.org/packages/4a/87/ec4186b1fe6365ced6fa470960e68fc7804bafbe7c0cf5a36237aa240efa/rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e", size = 421821, upload-time = "2025-05-21T12:45:40.732Z" }, - { url = "https://files.pythonhosted.org/packages/7a/60/84f821f6bf4e0e710acc5039d91f8f594fae0d93fc368704920d8971680d/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692", size = 564534, upload-time = "2025-05-21T12:45:42.672Z" }, - { url = "https://files.pythonhosted.org/packages/41/3a/bc654eb15d3b38f9330fe0f545016ba154d89cdabc6177b0295910cd0ebe/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf", size = 592674, upload-time = "2025-05-21T12:45:44.533Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ba/31239736f29e4dfc7a58a45955c5db852864c306131fd6320aea214d5437/rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe", size = 558781, upload-time = "2025-05-21T12:45:46.281Z" }, - { url = "https://files.pythonhosted.org/packages/78/b2/198266f070c6760e0e8cd00f9f2b9c86133ceebbe7c6d114bdcfea200180/rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b", size = 373973, upload-time = "2025-05-21T12:45:48.081Z" }, - { url = "https://files.pythonhosted.org/packages/13/79/1265eae618f88aa5d5e7122bd32dd41700bafe5a8bcea404e998848cd844/rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23", size = 359326, upload-time = "2025-05-21T12:45:49.825Z" }, - { url = "https://files.pythonhosted.org/packages/30/ab/6913b96f3ac072e87e76e45fe938263b0ab0d78b6b2cef3f2e56067befc0/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e", size = 387544, upload-time = "2025-05-21T12:45:51.764Z" }, - { url = "https://files.pythonhosted.org/packages/b0/23/129ed12d25229acc6deb8cbe90baadd8762e563c267c9594eb2fcc15be0c/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7", size = 400240, upload-time = "2025-05-21T12:45:54.061Z" }, - { url = "https://files.pythonhosted.org/packages/b5/e0/6811a38a5efa46b7ee6ed2103c95cb9abb16991544c3b69007aa679b6944/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83", size = 525599, upload-time = "2025-05-21T12:45:56.457Z" }, - { url = "https://files.pythonhosted.org/packages/6c/10/2dc88bcaa0d86bdb59e017a330b1972ffeeb7f5061bb5a180c9a2bb73bbf/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b", size = 411154, upload-time = "2025-05-21T12:45:58.525Z" }, - { url = "https://files.pythonhosted.org/packages/cf/d1/a72d522eb7d934fb33e9c501e6ecae00e2035af924d4ff37d964e9a3959b/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf", size = 388297, upload-time = "2025-05-21T12:46:00.264Z" }, - { url = 
"https://files.pythonhosted.org/packages/55/90/0dd7169ec74f042405b6b73512200d637a3088c156f64e1c07c18aa2fe59/rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1", size = 421894, upload-time = "2025-05-21T12:46:02.065Z" }, - { url = "https://files.pythonhosted.org/packages/37/e9/45170894add451783ed839c5c4a495e050aa8baa06d720364d9dff394dac/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1", size = 564409, upload-time = "2025-05-21T12:46:03.891Z" }, - { url = "https://files.pythonhosted.org/packages/59/d0/31cece9090e76fbdb50c758c165d40da604b03b37c3ba53f010bbfeb130a/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf", size = 592681, upload-time = "2025-05-21T12:46:06.009Z" }, - { url = "https://files.pythonhosted.org/packages/f1/4c/22ef535efb2beec614ba7be83e62b439eb83b0b0d7b1775e22d35af3f9b5/rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992", size = 558744, upload-time = "2025-05-21T12:46:07.78Z" }, - { url = "https://files.pythonhosted.org/packages/79/ff/f2150efc8daf0581d4dfaf0a2a30b08088b6df900230ee5ae4f7c8cd5163/rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793", size = 231305, upload-time = "2025-05-21T12:46:10.52Z" }, -] - -[[package]] -name = "semantic-version" -version = "2.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/31/f2289ce78b9b473d582568c234e104d2a342fd658cc288a7553d83bb8595/semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c", size = 52289, upload-time = "2022-05-26T13:35:23.454Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552, upload-time = "2022-05-26T13:35:21.206Z" }, -] - -[[package]] -name = "send2trash" -version = "1.8.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394, upload-time = "2024-04-07T00:01:09.267Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072, upload-time = "2024-04-07T00:01:07.438Z" }, -] - -[[package]] -name = "setuptools" -version = "80.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, -] - -[[package]] -name = "setuptools-rust" -version = "1.11.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "semantic-version" }, - { name = "setuptools" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e0/92/bf8589b1a2b6107cf9ec8daa9954c0b7620643fe1f37d31d75e572d995f5/setuptools_rust-1.11.1.tar.gz", hash = "sha256:7dabc4392252ced314b8050d63276e05fdc5d32398fc7d3cce1f6a6ac35b76c0", size = 310804, upload-time = "2025-04-04T14:28:10.576Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/01/37e1376f80578882e4f2d451f57d1fb42a599832057a123f57d9f26395c8/setuptools_rust-1.11.1-py3-none-any.whl", hash = "sha256:5eaaddaed268dc24a527ffa659ce56b22d3cf17b781247b779efd611031fe8ea", size = 28120, upload-time = "2025-04-04T14:28:09.564Z" }, -] - -[[package]] -name = "setuptools-scm" -version = "8.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "packaging" }, - { name = "setuptools" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/19/7ae64b70b2429c48c3a7a4ed36f50f94687d3bfcd0ae2f152367b6410dff/setuptools_scm-8.3.1.tar.gz", hash = "sha256:3d555e92b75dacd037d32bafdf94f97af51ea29ae8c7b234cf94b7a5bd242a63", size = 78088, upload-time = "2025-04-23T11:53:19.739Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/ac/8f96ba9b4cfe3e4ea201f23f4f97165862395e9331a424ed325ae37024a8/setuptools_scm-8.3.1-py3-none-any.whl", hash = "sha256:332ca0d43791b818b841213e76b1971b7711a960761c5bea5fc5cdb5196fbce3", size = 43935, upload-time = "2025-04-23T11:53:17.922Z" }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, -] - -[[package]] -name = "slotscheck" -version = "0.19.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/57/6fcb8df11e7c76eb87b23bfa931408e47f051c6161749c531b4060a45516/slotscheck-0.19.1.tar.gz", hash = "sha256:6146b7747f8db335a00a66b782f86011b74b995f61746dc5b36a9e77d5326013", size = 16050, upload-time = "2024-10-19T13:30:53.369Z" } -wheels = [ - 
{ url = "https://files.pythonhosted.org/packages/da/32/bd569256267f80b76b87d21a09795741a175778b954bee1d7b1a89852b6f/slotscheck-0.19.1-py3-none-any.whl", hash = "sha256:bff9926f8d6408ea21b6c6bbaa4389cea1682962e73ee4f30084b6d2b89260ee", size = 16995, upload-time = "2024-10-19T13:30:51.23Z" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - -[[package]] -name = "snowballstemmer" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, -] - -[[package]] -name = "soupsieve" -version = "2.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, -] - -[[package]] -name = "sphinx" -version = "7.4.7" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10'", -] -dependencies = [ - { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "babel", marker = "python_full_version < '3.10'" }, - { name = "colorama", marker = 
"python_full_version < '3.10' and sys_platform == 'win32'" }, - { name = "docutils", marker = "python_full_version < '3.10'" }, - { name = "imagesize", marker = "python_full_version < '3.10'" }, - { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, - { name = "jinja2", marker = "python_full_version < '3.10'" }, - { name = "packaging", marker = "python_full_version < '3.10'" }, - { name = "pygments", marker = "python_full_version < '3.10'" }, - { name = "requests", marker = "python_full_version < '3.10'" }, - { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, - { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, - { name = "tomli", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, -] - -[[package]] -name = "sphinx" -version = "8.1.3" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.14'", - "python_full_version == '3.13.*'", - "python_full_version >= '3.11' and python_full_version < '3.13'", - "python_full_version == '3.10.*'", -] -dependencies = [ - { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "babel", marker = "python_full_version >= '3.10'" }, - { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, - { name = "docutils", marker = "python_full_version >= '3.10'" }, - { name = "imagesize", marker = "python_full_version >= '3.10'" }, - { name = "jinja2", marker = "python_full_version >= '3.10'" }, - { name = "packaging", marker = "python_full_version >= '3.10'" }, - { name = "pygments", marker = "python_full_version >= '3.10'" }, - { name = "requests", marker = "python_full_version >= '3.10'" }, - { name = "snowballstemmer", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.10'" }, - { name = "tomli", marker = "python_full_version == '3.10.*'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", 
hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, -] - -[[package]] -name = "sphinx-rtd-theme" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "sphinxcontrib-jquery" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, -] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, -] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, -] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, -] - -[[package]] -name = "sphinxcontrib-jquery" -version = "4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, -] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, -] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, -] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, 
-] - -[[package]] -name = "stack-data" -version = "0.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "asttokens" }, - { name = "executing" }, - { name = "pure-eval" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, -] - -[[package]] -name = "stdlibs" -version = "2025.5.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/48/6f/92324b26048ff76b02dbb616d26b51a57e751bac7a7934016bb25a407725/stdlibs-2025.5.10.tar.gz", hash = "sha256:75d55a0b7b070ec44bd7dae5bc1ee1a6cea742122fb4253313cb4ab354f7f0c5", size = 19625, upload-time = "2025-05-11T03:46:42.917Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/51/a8f17bbb8f01cef657153972a99e382ce5c5e33a1a2df959f3ed2ebe2b89/stdlibs-2025.5.10-py3-none-any.whl", hash = "sha256:25178d9c2b45d2680292413bf59a20293355d45056ec92d32ea6ed349ce9e2a1", size = 57264, upload-time = "2025-05-11T03:46:41.633Z" }, -] - -[[package]] -name = "tabulate" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, -] - -[[package]] -name = "terminado" -version = "0.18.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ptyprocess", marker = "os_name != 'nt'" }, - { name = "pywinpty", marker = "os_name == 'nt'" }, - { name = "tornado" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701, upload-time = "2024-03-12T14:34:39.026Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" }, -] - -[[package]] -name = "testslide" -version = "2.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "psutil" }, - { name = "pygments" }, - { name = "typeguard" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ee/6f/c8d6d60a597c693559dab3b3362bd01e2212530e9a163eb0164af81e1ec1/TestSlide-2.7.1.tar.gz", hash = "sha256:d25890d5c383f673fac44a5f9e2561b7118d04f29f2c2b3d4f549e6db94cb34d", size = 50255, upload-time = 
"2023-03-16T14:09:41.204Z" } - -[[package]] -name = "tinycss2" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, -] - -[[package]] -name = "toml" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, -] - -[[package]] -name = "tomlkit" -version = "0.13.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, -] - -[[package]] -name = "tornado" -version = "6.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/89/c72771c81d25d53fe33e3dca61c233b665b2780f21820ba6fd2c6793c12b/tornado-6.5.1.tar.gz", hash = "sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c", size = 509934, upload-time = "2025-05-22T18:15:38.788Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/89/f4532dee6843c9e0ebc4e28d4be04c67f54f60813e4bf73d595fe7567452/tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7", size = 441948, upload-time = "2025-05-22T18:15:20.862Z" }, - { url = "https://files.pythonhosted.org/packages/15/9a/557406b62cffa395d18772e0cdcf03bed2fff03b374677348eef9f6a3792/tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6", size = 440112, upload-time = "2025-05-22T18:15:22.591Z" }, - { url = "https://files.pythonhosted.org/packages/55/82/7721b7319013a3cf881f4dffa4f60ceff07b31b394e459984e7a36dc99ec/tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888", size = 443672, upload-time = "2025-05-22T18:15:24.027Z" }, - { url = "https://files.pythonhosted.org/packages/7d/42/d11c4376e7d101171b94e03cef0cbce43e823ed6567ceda571f54cf6e3ce/tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331", size = 
443019, upload-time = "2025-05-22T18:15:25.735Z" }, - { url = "https://files.pythonhosted.org/packages/7d/f7/0c48ba992d875521ac761e6e04b0a1750f8150ae42ea26df1852d6a98942/tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e", size = 443252, upload-time = "2025-05-22T18:15:27.499Z" }, - { url = "https://files.pythonhosted.org/packages/89/46/d8d7413d11987e316df4ad42e16023cd62666a3c0dfa1518ffa30b8df06c/tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401", size = 443930, upload-time = "2025-05-22T18:15:29.299Z" }, - { url = "https://files.pythonhosted.org/packages/78/b2/f8049221c96a06df89bed68260e8ca94beca5ea532ffc63b1175ad31f9cc/tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692", size = 443351, upload-time = "2025-05-22T18:15:31.038Z" }, - { url = "https://files.pythonhosted.org/packages/76/ff/6a0079e65b326cc222a54720a748e04a4db246870c4da54ece4577bfa702/tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a", size = 443328, upload-time = "2025-05-22T18:15:32.426Z" }, - { url = "https://files.pythonhosted.org/packages/49/18/e3f902a1d21f14035b5bc6246a8c0f51e0eef562ace3a2cea403c1fb7021/tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365", size = 444396, upload-time = "2025-05-22T18:15:34.205Z" }, - { url = "https://files.pythonhosted.org/packages/7b/09/6526e32bf1049ee7de3bebba81572673b19a2a8541f795d887e92af1a8bc/tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b", size = 444840, upload-time = "2025-05-22T18:15:36.1Z" }, - { url = "https://files.pythonhosted.org/packages/55/a7/535c44c7bea4578e48281d83c615219f3ab19e6abc67625ef637c73987be/tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7", size = 443596, upload-time = "2025-05-22T18:15:37.433Z" }, -] - -[[package]] -name = "trailrunner" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pathspec" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4d/93/630e10bacd897daeb9ff5a408f4e7cb0fc2f243e7e3ef00f9e6cf319b11c/trailrunner-1.4.0.tar.gz", hash = "sha256:3fe61e259e6b2e5192f321c265985b7a0dc18497ced62b2da244f08104978398", size = 15836, upload-time = "2023-03-27T07:54:35.515Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/29/21001afea86bac5016c3940b43de3ce4786b0d8337d4ea79bb903c649ce3/trailrunner-1.4.0-py3-none-any.whl", hash = "sha256:a286d39f2723f28d167347f41cf8f232832648709366e722f55cf5545772a48e", size = 11071, upload-time = "2023-03-27T07:54:32.514Z" }, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, -] - -[[package]] -name = "typeguard" -version = "2.13.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/38/c61bfcf62a7b572b5e9363a802ff92559cb427ee963048e1442e3aef7490/typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", size = 40604, upload-time = "2021-12-10T21:09:39.158Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/bb/d43e5c75054e53efce310e79d63df0ac3f25e34c926be5dffb7d283fb2a8/typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1", size = 17605, upload-time = "2021-12-10T21:09:37.844Z" }, -] - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20250516" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ef/88/d65ed807393285204ab6e2801e5d11fbbea811adcaa979a2ed3b67a5ef41/types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5", size = 13943, upload-time = "2025-05-16T03:06:58.385Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" }, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mypy-extensions" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, -] - -[[package]] -name = "ufmt" -version = "2.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "black" }, - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < 
'3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "libcst" }, - { name = "moreorless" }, - { name = "tomlkit" }, - { name = "trailrunner" }, - { name = "typing-extensions" }, - { name = "usort" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/18/f8/c25e242a8e12062172dea4117859757a11339bbc39b1a3c7fb6a6de03bb2/ufmt-2.8.0.tar.gz", hash = "sha256:72c9502915497678de9aeab8aa18604890f14f869f7f378dd26e2878bde84f13", size = 24482, upload-time = "2024-10-25T06:21:57.239Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/4b/3f1b6f566b6cf70ccc5cba9a638fe4459f1e373c34d74df2e40e41871d70/ufmt-2.8.0-py3-none-any.whl", hash = "sha256:47a690811c576ebd3a0e30d77d43b65c84240e5c1611e5cb4a880bdd7f4507c1", size = 28268, upload-time = "2024-10-25T06:21:55.822Z" }, -] - -[[package]] -name = "uri-template" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678, upload-time = "2023-06-21T01:49:05.374Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" }, -] - -[[package]] -name = "urllib3" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, -] - -[[package]] -name = "usort" -version = "1.0.8.post1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "click", version = "8.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "libcst" }, - { name = "moreorless" }, - { name = "stdlibs" }, - { name = "toml" }, - { name = "trailrunner" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9b/f4/3ef48b43f2645f2cb4a37d6007e611bc669af44eecfee953c5dd57433011/usort-1.0.8.post1.tar.gz", hash = "sha256:68def75f2b20b97390c552c503e071ee06c65ad502c5f94f3bd03f095cf4dfe6", size = 83215, upload-time = "2024-02-12T04:29:33.632Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/55/cc51ceb3d93763b9d28def24615bc485212525550967ce9e992a455f9ab5/usort-1.0.8.post1-py3-none-any.whl", hash = "sha256:6c57cdf17b458c79f8a61eb3ce8bf3f93e36d3c2edd602b9b2aa16b6875d3255", size = 37281, upload-time = "2024-02-12T04:29:31.693Z" }, -] - -[[package]] -name = "wcwidth" -version = "0.2.13" -source = { registry = "https://pypi.org/simple" } -sdist 
= { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, -] - -[[package]] -name = "webcolors" -version = "24.11.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064, upload-time = "2024-11-11T07:43:24.224Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934, upload-time = "2024-11-11T07:43:22.529Z" }, -] - -[[package]] -name = "webencodings" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, -] - -[[package]] -name = "websocket-client" -version = "1.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, -] - -[[package]] -name = "widgetsnbextension" -version = "4.0.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/53/2e0253c5efd69c9656b1843892052a31c36d37ad42812b5da45c62191f7e/widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af", size = 1097428, upload-time = "2025-04-10T13:01:25.628Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/51/5447876806d1088a0f8f71e16542bf350918128d0a69437df26047c8e46f/widgetsnbextension-4.0.14-py3-none-any.whl", hash = "sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575", size = 2196503, upload-time = "2025-04-10T13:01:23.086Z" }, -] - -[[package]] -name = "zipp" -version = "3.23.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, -] diff --git a/zizmor.yml b/zizmor.yml deleted file mode 100644 index faf5a0f0..00000000 --- a/zizmor.yml +++ /dev/null @@ -1,5 +0,0 @@ -rules: - unpinned-uses: - config: - policies: - "*": ref-pin \ No newline at end of file