Mirror of https://github.com/Instagram/LibCST.git, synced 2025-12-23 10:35:53 +00:00

Compare commits
29 commits
| SHA1 |
|---|
| c5e40e8769 |
| b75343e74e |
| 9275a8bf78 |
| b66c0e2822 |
| c2169d240b |
| 73b17d8449 |
| 421f7d3400 |
| 129b20f476 |
| 6f5da5f998 |
| 7c906eb47c |
| de5635394b |
| 47cacb69a3 |
| 3b5329aa20 |
| 48668dfabb |
| 0c82bfa761 |
| f40d835145 |
| d721a06c3f |
| e064729b4c |
| f746afd537 |
| 2048e6693c |
| 441a7f0c81 |
| 7090a0db2b |
| b395d7ccf7 |
| 9542fc3882 |
| aa53960458 |
| 2931c86e07 |
| 2fb4b2dd58 |
| 4bc2116d2a |
| 287ab059a0 |
65 changed files with 2301 additions and 351 deletions
.github/workflows/build.yml (vendored): 10 changed lines

@@ -10,10 +10,8 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        # macos-13 is an intel runner, macos-latest is apple silicon
        os:
          [
            macos-13,
            macos-latest,
            ubuntu-latest,
            ubuntu-24.04-arm,
@@ -28,14 +26,10 @@ jobs:
        with:
          fetch-depth: 0
          persist-credentials: false
      - uses: actions/setup-python@v5
      - uses: actions/setup-python@v6
        with:
          python-version: "3.12"
      - uses: dtolnay/rust-toolchain@stable
      - name: Set MACOSX_DEPLOYMENT_TARGET for Intel MacOS
        if: matrix.os == 'macos-13'
        run: >-
          echo MACOSX_DEPLOYMENT_TARGET=10.12 >> $GITHUB_ENV
      - name: Disable scmtools local scheme
        if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
        run: >-
@@ -44,7 +38,7 @@ jobs:
        if: github.event_name != 'release'
        run: echo CIBW_ENABLE=cpython-prerelease >> $GITHUB_ENV
      - name: Build wheels
        uses: pypa/cibuildwheel@v3.0.0rc2
        uses: pypa/cibuildwheel@v3.2.1
      - uses: actions/upload-artifact@v4
        with:
          path: wheelhouse/*.whl
.github/workflows/ci.yml (vendored): 31 changed lines

@@ -26,9 +26,9 @@ jobs:
          - "3.14t"
    steps:
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        uses: astral-sh/setup-uv@v7
        with:
          version: "0.7.12"
          version: "0.7.13"
          python-version: ${{ matrix.python-version }}
      - uses: actions/checkout@v4
        with:
@@ -39,15 +39,8 @@ jobs:
        run: uv sync --locked --dev
      - name: Native Parser Tests
        run: uv run poe test
      - name: Pure Parser Tests
        env:
          COVERAGE_FILE: .coverage.pure
          LIBCST_PARSER_TYPE: pure
        run: uv run poe test
      - name: Coverage
        run: |
          uv run coverage combine .coverage.pure
          uv run coverage report
        run: uv run coverage report

  # Run linters
  lint:
@@ -58,9 +51,9 @@ jobs:
          fetch-depth: 0
          persist-credentials: false
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        uses: astral-sh/setup-uv@v7
        with:
          version: "0.7.12"
          version: "0.7.13"
          python-version: "3.10"
      - run: uv run poe lint
      - run: uv run poe fixtures
@@ -74,9 +67,9 @@ jobs:
          fetch-depth: 0
          persist-credentials: false
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        uses: astral-sh/setup-uv@v7
        with:
          version: "0.7.12"
          version: "0.7.13"
          python-version: "3.10"
      - run: uv run poe typecheck
@@ -89,9 +82,9 @@ jobs:
          fetch-depth: 0
          persist-credentials: false
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        uses: astral-sh/setup-uv@v7
        with:
          version: "0.7.12"
          version: "0.7.13"
          python-version: "3.10"
      - uses: ts-graphviz/setup-graphviz@v2
      - run: uv run --group docs poe docs
@@ -117,7 +110,7 @@ jobs:
      - uses: dtolnay/rust-toolchain@stable
        with:
          components: rustfmt, clippy
      - uses: actions/setup-python@v5
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
      - name: test
@@ -143,3 +136,7 @@ jobs:
      - run: rustup component add rustfmt
      - name: format
        run: cargo fmt --all --manifest-path=native/Cargo.toml -- --check
  build:
    # only trigger here for pull requests - regular pushes are handled in pypi_upload
    if: ${{ github.event_name == 'pull_request' }}
    uses: Instagram/LibCST/.github/workflows/build.yml@main
.github/workflows/pypi_upload.yml (vendored): 8 changed lines

@@ -25,18 +25,18 @@ jobs:
          persist-credentials: false
      - name: Download binary wheels
        id: download
        uses: actions/download-artifact@v4
        uses: actions/download-artifact@v5
        with:
          pattern: wheels-*
          path: wheelhouse
          merge-multiple: true
      - uses: actions/setup-python@v5
      - uses: actions/setup-python@v6
        with:
          python-version: "3.10"
      - name: Install uv
        uses: astral-sh/setup-uv@v5
        uses: astral-sh/setup-uv@v7
        with:
          version: "0.7.12"
          version: "0.7.13"
          enable-cache: false
      - name: Build a source tarball
        env:
.github/workflows/zizmor.yml (vendored): 4 changed lines

@@ -21,7 +21,7 @@ jobs:
          persist-credentials: false

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        uses: astral-sh/setup-uv@v7

      - name: Run zizmor 🌈
        run: uvx zizmor --format sarif . > results.sarif
@@ -29,7 +29,7 @@ jobs:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Upload SARIF file
        uses: github/codeql-action/upload-sarif@v3
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: results.sarif
          category: zizmor
CHANGELOG.md: 44 changed lines

@@ -1,3 +1,47 @@
# 1.8.6 - 2025-11-03

## What's Changed
* Update pyproject.toml for 3.14t by @itamaro in https://github.com/Instagram/LibCST/pull/1417
* Update PyO3 to 0.26 by @cjwatson in https://github.com/Instagram/LibCST/pull/1413
* Make CodemodCommand's supported_transforms order deterministic by @frvnkliu in https://github.com/Instagram/LibCST/pull/1424

## New Contributors
* @cjwatson made their first contribution in https://github.com/Instagram/LibCST/pull/1413
* @frvnkliu made their first contribution in https://github.com/Instagram/LibCST/pull/1424

**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.5...v1.8.6

# 1.8.5 - 2025-09-25

## What's Changed
* fixed: circular import error by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1406


# 1.8.4 - 2025-09-09

## What's Changed
* fixed: generate Attribute nodes when applying type annotations by @tungol in https://github.com/Instagram/LibCST/pull/1396
* added: Support parsing of t-strings #1374 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1398
* added: add support for PEP758 by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1401

## New Contributors
* @tungol made their first contribution in https://github.com/Instagram/LibCST/pull/1396

**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.4

# 1.8.3 - 2025-08-29

## What's Changed
* removed: remove entry points to pure parser by @drinkmorewaterr in https://github.com/Instagram/LibCST/pull/1375
* fixed: fixes match statements to work with PositionProvider by @imsut in https://github.com/Instagram/LibCST/pull/1389

## New Contributors
* @hunterhogan made their first contribution in https://github.com/Instagram/LibCST/pull/1378
* @thomas-serre-sonarsource made their first contribution in https://github.com/Instagram/LibCST/pull/1379
* @imsut made their first contribution in https://github.com/Instagram/LibCST/pull/1389

**Full Changelog**: https://github.com/Instagram/LibCST/compare/v1.8.2...v1.8.3

# 1.8.2 - 2025-06-13

# Fixed
@@ -6,6 +6,7 @@
1. Version bumps are generally not worth mentioning with some notable exceptions (like pyo3)
1. Group related PRs into one bullet point if it makes sense
2. manually bump versions in `Cargo.toml` files in the repo
3. make a new PR with the above changes, get it reviewed and landed
4. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there
5. after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst`
3. run `cargo update -p libcst`
4. make a new PR with the above changes, get it reviewed and landed
5. make a new release on Github, create a new tag on publish, and copy the contents of the changelog entry in there
6. after publishing, check out the repo at the new tag, and run `cd native; cargo +nightly publish -Z package-workspace -p libcst_derive -p libcst`
@@ -37,7 +37,7 @@ A Concrete Syntax Tree (CST) parser and serializer library for Python

.. intro-start

LibCST parses Python 3.0 -> 3.13 source code as a CST tree that keeps
LibCST parses Python 3.0 -> 3.14 source code as a CST tree that keeps
all formatting details (comments, whitespaces, parentheses, etc). It's useful for
building automated refactoring (codemod) applications and linters.
@@ -10,7 +10,7 @@
    "Parsing and Visiting\n",
    "====================\n",
    "\n",
    "LibCST provides helpers to parse source code string as concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use visitor pattern to traverse the tree. In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n",
    "LibCST provides helpers to parse source code string as a concrete syntax tree. In order to perform static analysis to identify patterns in the tree or modify the tree programmatically, we can use the visitor pattern to traverse the tree. In this tutorial, we demonstrate a common four-step-workflow to build an automated refactoring (codemod) application:\n",
    "\n",
    "1. `Parse Source Code <#Parse-Source-Code>`_\n",
    "2. `Display The Source Code CST <#Display-Source-Code-CST>`_\n",
@@ -19,7 +19,7 @@
    "\n",
    "Parse Source Code\n",
    "=================\n",
    "LibCST provides various helpers to parse source code as concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing <parser>` for more detail)."
    "LibCST provides various helpers to parse source code as a concrete syntax tree: :func:`~libcst.parse_module`, :func:`~libcst.parse_expression` and :func:`~libcst.parse_statement` (see :doc:`Parsing <parser>` for more detail)."
   ]
  },
  {
@@ -90,7 +90,7 @@
    "|\n",
    "Example: add typing annotation from pyi stub file to Python source\n",
    "------------------------------------------------------------------\n",
    "Python `typing annotation <https://mypy.readthedocs.io/en/latest/cheat_sheet_py3.html>`_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easliy using LibCST. The first step is to parse the pyi stub and source files as trees."
    "Python `typing annotation <https://mypy.readthedocs.io/en/latest/cheat_sheet_py3.html>`_ was added in Python 3.5. Some Python applications add typing annotations in separate ``pyi`` stub files in order to support old Python versions. When applications decide to stop supporting old Python versions, they'll want to automatically copy the type annotation from a pyi file to a source file. Here we demonstrate how to do that easily using LibCST. The first step is to parse the pyi stub and source files as trees."
   ]
  },
  {
@@ -106,7 +106,7 @@
    "        self._replace(type=self.type.name))\n",
    "\n",
    "def tokenize(code, version_info, start_pos=(1, 0)):\n",
    "    \"\"\"Generate tokens from a the source code (string).\"\"\"\n",
    "    \"\"\"Generate tokens from the source code (string).\"\"\"\n",
    "    lines = split_lines(code, keepends=True)\n",
    "    return tokenize_lines(lines, version_info, start_pos=start_pos)\n",
    "'''\n",
@@ -134,7 +134,7 @@
    "Build Visitor or Transformer\n",
    "============================\n",
    "For traversing and modifying the tree, LibCST provides Visitor and Transformer classes similar to the `ast module <https://docs.python.org/3/library/ast.html#ast.NodeVisitor>`_. To implement a visitor (read only) or transformer (read/write), simply implement a subclass of :class:`~libcst.CSTVisitor` or :class:`~libcst.CSTTransformer` (see :doc:`Visitors <visitors>` for more detail).\n",
    "In the typing example, we need to implement a visitor to collect typing annotation from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations."
    "In the typing example, we need to implement a visitor to collect typing annotations from the stub tree and a transformer to copy the annotation to the function signature. In the visitor, we implement ``visit_FunctionDef`` to collect annotations. Later in the transformer, we implement ``leave_FunctionDef`` to add the collected annotations."
   ]
  },
  {
@@ -226,7 +226,7 @@
    "|\n",
    "Generate Source Code\n",
    "====================\n",
    "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt <https://ufmt.omnilib.dev/en/stable/>`_ to reformate the code to keep a consistent coding style."
    "Generating the source code from a cst tree is as easy as accessing the :attr:`~libcst.Module.code` attribute on :class:`~libcst.Module`. After the code generation, we often use `ufmt <https://ufmt.omnilib.dev/en/stable/>`_ to reformat the code to keep a consistent coding style."
   ]
  },
  {
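The tutorial text above describes the parse-then-visit workflow. A minimal, self-contained sketch of that flow using the public LibCST API follows; the RenameFoo transformer and the sample source string are illustrative only, not part of the tutorial.

```python
import libcst as cst


class RenameFoo(cst.CSTTransformer):
    """Rename every `foo` identifier to `bar` while preserving formatting."""

    def leave_Name(self, original_node: cst.Name, updated_node: cst.Name) -> cst.Name:
        if updated_node.value == "foo":
            return updated_node.with_changes(value="bar")
        return updated_node


# Parse, transform, and regenerate source; comments and whitespace are kept.
module = cst.parse_module("foo = 1  # keep this comment\n")
print(module.visit(RenameFoo()).code)  # bar = 1  # keep this comment
```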
@@ -29,6 +29,7 @@ from libcst._nodes.expression import (
    BaseSimpleComp,
    BaseSlice,
    BaseString,
    BaseTemplatedStringContent,
    BinaryOperation,
    BooleanOperation,
    Call,
@@ -75,6 +76,9 @@ from libcst._nodes.expression import (
    StarredElement,
    Subscript,
    SubscriptElement,
    TemplatedString,
    TemplatedStringExpression,
    TemplatedStringText,
    Tuple,
    UnaryOperation,
    Yield,
@@ -268,6 +272,7 @@ __all__ = [
    "BaseElement",
    "BaseExpression",
    "BaseFormattedStringContent",
    "BaseTemplatedStringContent",
    "BaseList",
    "BaseNumber",
    "BaseSet",
@@ -291,6 +296,9 @@ __all__ = [
    "FormattedString",
    "FormattedStringExpression",
    "FormattedStringText",
    "TemplatedString",
    "TemplatedStringText",
    "TemplatedStringExpression",
    "From",
    "GeneratorExp",
    "IfExp",
@@ -958,6 +958,253 @@ class FormattedString(_BasePrefixedString):
        state.add_token(self.end)


class BaseTemplatedStringContent(CSTNode, ABC):
    """
    The base type for :class:`TemplatedStringText` and
    :class:`TemplatedStringExpression`. A :class:`TemplatedString` is composed of a
    sequence of :class:`BaseTemplatedStringContent` parts.
    """

    __slots__ = ()


@add_slots
@dataclass(frozen=True)
class TemplatedStringText(BaseTemplatedStringContent):
    """
    Part of a :class:`TemplatedString` that is not inside curly braces (``{`` or ``}``).
    For example, in::

        f"ab{cd}ef"

    ``ab`` and ``ef`` are :class:`TemplatedStringText` nodes, but ``{cd}`` is a
    :class:`TemplatedStringExpression`.
    """

    #: The raw string value, including any escape characters present in the source
    #: code, not including any enclosing quotes.
    value: str

    def _visit_and_replace_children(
        self, visitor: CSTVisitorT
    ) -> "TemplatedStringText":
        return TemplatedStringText(value=self.value)

    def _codegen_impl(self, state: CodegenState) -> None:
        state.add_token(self.value)


@add_slots
@dataclass(frozen=True)
class TemplatedStringExpression(BaseTemplatedStringContent):
    """
    Part of a :class:`TemplatedString` that is inside curly braces (``{`` or ``}``),
    including the surrounding curly braces. For example, in::

        f"ab{cd}ef"

    ``{cd}`` is a :class:`TemplatedStringExpression`, but ``ab`` and ``ef`` are
    :class:`TemplatedStringText` nodes.

    A t-string expression may contain ``conversion`` and ``format_spec`` suffixes that
    control how the expression is converted to a string.
    """

    #: The expression we will evaluate and render when generating the string.
    expression: BaseExpression

    #: An optional conversion specifier, such as ``!s``, ``!r`` or ``!a``.
    conversion: Optional[str] = None

    #: An optional format specifier following the `format specification mini-language
    #: <https://docs.python.org/3/library/string.html#formatspec>`_.
    format_spec: Optional[Sequence[BaseTemplatedStringContent]] = None

    #: Whitespace after the opening curly brace (``{``), but before the ``expression``.
    whitespace_before_expression: BaseParenthesizableWhitespace = (
        SimpleWhitespace.field("")
    )

    #: Whitespace after the ``expression``, but before the ``conversion``,
    #: ``format_spec`` and the closing curly brace (``}``). Python does not
    #: allow whitespace inside or after a ``conversion`` or ``format_spec``.
    whitespace_after_expression: BaseParenthesizableWhitespace = SimpleWhitespace.field(
        ""
    )

    #: Equal sign used when the templated string expression is a self-documenting
    #: expression, such as ``f"{x=}"``. See the `Python 3.8 release notes
    #: <https://docs.python.org/3/whatsnew/3.8.html#f-strings-support-for-self-documenting-expressions-and-debugging>`_.
    equal: Optional[AssignEqual] = None

    def _validate(self) -> None:
        if self.conversion is not None and self.conversion not in ("s", "r", "a"):
            raise CSTValidationError("Invalid t-string conversion.")

    def _visit_and_replace_children(
        self, visitor: CSTVisitorT
    ) -> "TemplatedStringExpression":
        format_spec = self.format_spec
        return TemplatedStringExpression(
            whitespace_before_expression=visit_required(
                self,
                "whitespace_before_expression",
                self.whitespace_before_expression,
                visitor,
            ),
            expression=visit_required(self, "expression", self.expression, visitor),
            equal=visit_optional(self, "equal", self.equal, visitor),
            whitespace_after_expression=visit_required(
                self,
                "whitespace_after_expression",
                self.whitespace_after_expression,
                visitor,
            ),
            conversion=self.conversion,
            format_spec=(
                visit_sequence(self, "format_spec", format_spec, visitor)
                if format_spec is not None
                else None
            ),
        )

    def _codegen_impl(self, state: CodegenState) -> None:
        state.add_token("{")
        self.whitespace_before_expression._codegen(state)
        self.expression._codegen(state)
        equal = self.equal
        if equal is not None:
            equal._codegen(state)
        self.whitespace_after_expression._codegen(state)
        conversion = self.conversion
        if conversion is not None:
            state.add_token("!")
            state.add_token(conversion)
        format_spec = self.format_spec
        if format_spec is not None:
            state.add_token(":")
            for spec in format_spec:
                spec._codegen(state)
        state.add_token("}")


@add_slots
@dataclass(frozen=True)
class TemplatedString(_BasePrefixedString):
    """
    A "t-string". Template strings are a generalization of f-strings,
    using a t in place of the f prefix. Instead of evaluating to str,
    t-strings evaluate to a new type: Template

    T-strings are defined in 'PEP 750'

    >>> import libcst as cst
    >>> cst.parse_expression('t"ab{cd}ef"')
    TemplatedString(
        parts=[
            TemplatedStringText(
                value='ab',
            ),
            TemplatedStringExpression(
                expression=Name(
                    value='cd',
                    lpar=[],
                    rpar=[],
                ),
                conversion=None,
                format_spec=None,
                whitespace_before_expression=SimpleWhitespace(
                    value='',
                ),
                whitespace_after_expression=SimpleWhitespace(
                    value='',
                ),
                equal=None,
            ),
            TemplatedStringText(
                value='ef',
            ),
        ],
        start='t"',
        end='"',
        lpar=[],
        rpar=[],
    )
    >>>
    """

    #: A templated string is composed as a series of :class:`TemplatedStringText` and
    #: :class:`TemplatedStringExpression` parts.
    parts: Sequence[BaseTemplatedStringContent]

    #: The string prefix and the leading quote, such as ``t"``, ``T'``, ``tr"``, or
    #: ``t"""``.
    start: str = 't"'

    #: The trailing quote. This must match the type of quote used in ``start``.
    end: Literal['"', "'", '"""', "'''"] = '"'

    lpar: Sequence[LeftParen] = ()
    #: Sequence of parenthesis for precedence dictation.
    rpar: Sequence[RightParen] = ()

    def _validate(self) -> None:
        super(_BasePrefixedString, self)._validate()

        # Validate any prefix
        prefix = self.prefix
        if prefix not in ("t", "tr", "rt"):
            raise CSTValidationError("Invalid t-string prefix.")

        # Validate wrapping quotes
        starttoken = self.start[len(prefix) :]
        if starttoken != self.end:
            raise CSTValidationError("t-string must have matching enclosing quotes.")

        # Validate valid wrapping quote usage
        if starttoken not in ('"', "'", '"""', "'''"):
            raise CSTValidationError("Invalid t-string enclosing quotes.")

    @property
    def prefix(self) -> str:
        """
        Returns the string's prefix, if any exists. The prefix can be ``t``,
        ``tr``, or ``rt``.
        """

        prefix = ""
        for c in self.start:
            if c in ['"', "'"]:
                break
            prefix += c
        return prefix.lower()

    @property
    def quote(self) -> StringQuoteLiteral:
        """
        Returns the quotation used to denote the string. Can be either ``'``,
        ``"``, ``'''`` or ``\"\"\"``.
        """

        return self.end

    def _visit_and_replace_children(self, visitor: CSTVisitorT) -> "TemplatedString":
        return TemplatedString(
            lpar=visit_sequence(self, "lpar", self.lpar, visitor),
            start=self.start,
            parts=visit_sequence(self, "parts", self.parts, visitor),
            end=self.end,
            rpar=visit_sequence(self, "rpar", self.rpar, visitor),
        )

    def _codegen_impl(self, state: CodegenState) -> None:
        with self._parenthesize(state):
            state.add_token(self.start)
            for part in self.parts:
                part._codegen(state)
            state.add_token(self.end)


@add_slots
@dataclass(frozen=True)
class ConcatenatedString(BaseString):
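The TemplatedString docstring above already shows the shape of a parsed t-string. The short sketch below, assuming a LibCST build that includes this PEP 750 support, walks the parts of such a node.

```python
import libcst as cst

node = cst.parse_expression('t"ab{cd}ef"')
assert isinstance(node, cst.TemplatedString)
for part in node.parts:
    if isinstance(part, cst.TemplatedStringExpression):
        # Render just the interpolated expression back to source text.
        print("expression:", cst.Module(body=[]).code_for_node(part.expression))
    else:
        print("text:", part.value)
```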
@@ -2886,6 +2886,9 @@ class MatchCase(CSTNode):
            state.add_token("if")
            self.whitespace_after_if._codegen(state)
            guard._codegen(state)
        else:
            self.whitespace_before_if._codegen(state)
            self.whitespace_after_if._codegen(state)

        self.whitespace_before_colon._codegen(state)
        state.add_token(":")
@@ -3473,6 +3476,13 @@ class MatchAs(MatchPattern):
                state.add_token(" ")
            elif isinstance(ws_after, BaseParenthesizableWhitespace):
                ws_after._codegen(state)
        else:
            ws_before = self.whitespace_before_as
            if isinstance(ws_before, BaseParenthesizableWhitespace):
                ws_before._codegen(state)
            ws_after = self.whitespace_after_as
            if isinstance(ws_after, BaseParenthesizableWhitespace):
                ws_after._codegen(state)
        if name is None:
            state.add_token("_")
        else:
@@ -9,7 +9,6 @@ from typing import Any
import libcst as cst
from libcst import parse_expression
from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -1184,7 +1183,7 @@ class AtomTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)

@@ -8,7 +8,6 @@ from typing import Any
import libcst as cst
from libcst import parse_expression
from libcst._nodes.tests.base import CSTNodeTest
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -189,4 +188,4 @@ class BinaryOperationTest(CSTNodeTest):
        )
    )
    def test_parse_error(self, **kwargs: Any) -> None:
        self.assert_parses(**kwargs, expect_success=not is_native())
        self.assert_parses(**kwargs, expect_success=False)

@@ -8,7 +8,6 @@ from typing import Any, Callable
import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -210,8 +209,6 @@ class ClassDefCreationTest(CSTNodeTest):
        )
    )
    def test_valid_native(self, **kwargs: Any) -> None:
        if not is_native():
            self.skipTest("Disabled for pure python parser")
        self.validate_node(**kwargs)

    @data_provider(

@@ -8,7 +8,6 @@ from typing import Any
import libcst as cst
from libcst import parse_expression
from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -188,6 +187,6 @@ class DictTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)

@@ -8,7 +8,6 @@ from typing import Any, Callable
import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -741,8 +740,6 @@ class FunctionDefCreationTest(CSTNodeTest):
        )
    )
    def test_valid(self, **kwargs: Any) -> None:
        if not is_native() and kwargs.get("native_only", False):
            self.skipTest("Disabled for native parser")
        if "native_only" in kwargs:
            kwargs.pop("native_only")
        self.validate_node(**kwargs)
@@ -891,8 +888,6 @@ class FunctionDefCreationTest(CSTNodeTest):
        )
    )
    def test_valid_native(self, **kwargs: Any) -> None:
        if not is_native():
            self.skipTest("Disabled for pure python parser")
        self.validate_node(**kwargs)

    @data_provider(
@@ -2223,8 +2218,6 @@ class FunctionDefParserTest(CSTNodeTest):
        )
    )
    def test_valid_38(self, node: cst.CSTNode, code: str, **kwargs: Any) -> None:
        if not is_native() and kwargs.get("native_only", False):
            self.skipTest("disabled for pure python parser")
        self.validate_node(node, code, _parse_statement_force_38)

    @data_provider(
@@ -2252,7 +2245,7 @@ class FunctionDefParserTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)

@@ -2271,6 +2264,4 @@ class FunctionDefParserTest(CSTNodeTest):
        )
    )
    def test_parse_error(self, **kwargs: Any) -> None:
        if not is_native():
            self.skipTest("Skipped for non-native parser")
        self.assert_parses(**kwargs, expect_success=False, parser=parse_statement)

@@ -8,7 +8,6 @@ from typing import Any, Callable
import libcst as cst
from libcst import parse_expression, parse_statement
from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -126,6 +125,6 @@ class ListTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)

@@ -3,17 +3,14 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Any, Callable, Optional
from typing import Any, Callable

import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest
from libcst._parser.entrypoints import is_native
from libcst.testing.utils import data_provider

parser: Optional[Callable[[str], cst.CSTNode]] = (
    parse_statement if is_native() else None
)
parser: Callable[[str], cst.CSTNode] = parse_statement


class MatchTest(CSTNodeTest):

@@ -11,7 +11,6 @@ from libcst._nodes.tests.base import (
    parse_expression_as,
    parse_statement_as,
)
from libcst._parser.entrypoints import is_native
from libcst.testing.utils import data_provider


@@ -70,6 +69,6 @@ class NamedExprTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)

@@ -8,7 +8,7 @@ from typing import cast, Tuple
import libcst as cst
from libcst import parse_module, parse_statement
from libcst._nodes.tests.base import CSTNodeTest
from libcst._parser.entrypoints import is_native

from libcst.metadata import CodeRange, MetadataWrapper, PositionProvider
from libcst.testing.utils import data_provider

@@ -117,7 +117,7 @@ class ModuleTest(CSTNodeTest):
    def test_parser(
        self, *, code: str, expected: cst.Module, enabled_for_native: bool = True
    ) -> None:
        if is_native() and not enabled_for_native:
        if not enabled_for_native:
            self.skipTest("Disabled for native parser")
        self.assertEqual(parse_module(code), expected)

@@ -8,7 +8,6 @@ from typing import Any, Callable
import libcst as cst
from libcst import parse_expression
from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as
from libcst._parser.entrypoints import is_native
from libcst.testing.utils import data_provider


@@ -133,6 +132,6 @@ class ListTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)
libcst/_nodes/tests/test_template_strings.py (new file): 183 lines

@@ -0,0 +1,183 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Callable, Optional

import libcst as cst
from libcst import parse_expression
from libcst._nodes.tests.base import CSTNodeTest
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider


class TemplatedStringTest(CSTNodeTest):
    @data_provider(
        (
            # Simple t-string with only text
            (
                cst.TemplatedString(
                    parts=(cst.TemplatedStringText("hello world"),),
                ),
                't"hello world"',
                True,
            ),
            # t-string with one expression
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringText("hello "),
                        cst.TemplatedStringExpression(
                            expression=cst.Name("name"),
                        ),
                    ),
                ),
                't"hello {name}"',
                True,
            ),
            # t-string with multiple expressions
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringText("a="),
                        cst.TemplatedStringExpression(expression=cst.Name("a")),
                        cst.TemplatedStringText(", b="),
                        cst.TemplatedStringExpression(expression=cst.Name("b")),
                    ),
                ),
                't"a={a}, b={b}"',
                True,
                CodeRange((1, 0), (1, 15)),
            ),
            # t-string with nested expression
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringText("sum="),
                        cst.TemplatedStringExpression(
                            expression=cst.BinaryOperation(
                                left=cst.Name("a"),
                                operator=cst.Add(),
                                right=cst.Name("b"),
                            )
                        ),
                    ),
                ),
                't"sum={a + b}"',
                True,
            ),
            # t-string with spacing in expression
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringText("x = "),
                        cst.TemplatedStringExpression(
                            whitespace_before_expression=cst.SimpleWhitespace(" "),
                            expression=cst.Name("x"),
                            whitespace_after_expression=cst.SimpleWhitespace(" "),
                        ),
                    ),
                ),
                't"x = { x }"',
                True,
            ),
            # t-string with escaped braces
            (
                cst.TemplatedString(
                    parts=(cst.TemplatedStringText("{{foo}}"),),
                ),
                't"{{foo}}"',
                True,
            ),
            # t-string with only an expression
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringExpression(expression=cst.Name("value")),
                    ),
                ),
                't"{value}"',
                True,
            ),
            # t-string with whitespace and newlines
            (
                cst.TemplatedString(
                    parts=(
                        cst.TemplatedStringText("line1\\n"),
                        cst.TemplatedStringExpression(expression=cst.Name("x")),
                        cst.TemplatedStringText("\\nline2"),
                    ),
                ),
                't"line1\\n{x}\\nline2"',
                True,
            ),
            # t-string with parenthesis (not typical, but test node construction)
            (
                cst.TemplatedString(
                    lpar=(cst.LeftParen(),),
                    parts=(cst.TemplatedStringText("foo"),),
                    rpar=(cst.RightParen(),),
                ),
                '(t"foo")',
                True,
            ),
            # t-string with whitespace in delimiters
            (
                cst.TemplatedString(
                    lpar=(cst.LeftParen(whitespace_after=cst.SimpleWhitespace(" ")),),
                    parts=(cst.TemplatedStringText("foo"),),
                    rpar=(cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")),),
                ),
                '( t"foo" )',
                True,
            ),
            # Test TemplatedStringText and TemplatedStringExpression individually
            (
                cst.TemplatedStringText("abc"),
                "abc",
                False,
                CodeRange((1, 0), (1, 3)),
            ),
            (
                cst.TemplatedStringExpression(expression=cst.Name("foo")),
                "{foo}",
                False,
                CodeRange((1, 0), (1, 5)),
            ),
        )
    )
    def test_valid(
        self,
        node: cst.CSTNode,
        code: str,
        check_parsing: bool,
        position: Optional[CodeRange] = None,
    ) -> None:
        if check_parsing:
            self.validate_node(node, code, parse_expression, expected_position=position)
        else:
            self.validate_node(node, code, expected_position=position)

    @data_provider(
        (
            (
                lambda: cst.TemplatedString(
                    parts=(cst.TemplatedStringText("foo"),),
                    lpar=(cst.LeftParen(),),
                ),
                "left paren without right paren",
            ),
            (
                lambda: cst.TemplatedString(
                    parts=(cst.TemplatedStringText("foo"),),
                    rpar=(cst.RightParen(),),
                ),
                "right paren without left paren",
            ),
        )
    )
    def test_invalid(
        self, get_node: Callable[[], cst.CSTNode], expected_re: str
    ) -> None:
        self.assert_invalid(get_node, expected_re)
@@ -3,18 +3,15 @@
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Any, Callable, Optional
from typing import Any, Callable

import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

native_parse_statement: Optional[Callable[[str], cst.CSTNode]] = (
    parse_statement if is_native() else None
)
native_parse_statement: Callable[[str], cst.CSTNode] = parse_statement


class TryTest(CSTNodeTest):
@@ -347,6 +344,34 @@ class TryTest(CSTNodeTest):
                ),
                "code": "try: pass\nexcept foo()as bar: pass\n",
            },
            # PEP758 - Multiple exceptions with no parentheses
            {
                "node": cst.Try(
                    cst.SimpleStatementSuite((cst.Pass(),)),
                    handlers=[
                        cst.ExceptHandler(
                            cst.SimpleStatementSuite((cst.Pass(),)),
                            type=cst.Tuple(
                                elements=[
                                    cst.Element(
                                        value=cst.Name(
                                            value="ValueError",
                                        ),
                                    ),
                                    cst.Element(
                                        value=cst.Name(
                                            value="RuntimeError",
                                        ),
                                    ),
                                ],
                                lpar=[],
                                rpar=[],
                            ),
                        )
                    ],
                ),
                "code": "try: pass\nexcept ValueError, RuntimeError: pass\n",
            },
        )
    )
    def test_valid(self, **kwargs: Any) -> None:
@@ -579,6 +604,38 @@ class TryStarTest(CSTNodeTest):
                "parser": native_parse_statement,
                "expected_position": CodeRange((1, 0), (5, 13)),
            },
            # PEP758 - Multiple exceptions with no parentheses
            {
                "node": cst.TryStar(
                    cst.SimpleStatementSuite((cst.Pass(),)),
                    handlers=[
                        cst.ExceptStarHandler(
                            cst.SimpleStatementSuite((cst.Pass(),)),
                            type=cst.Tuple(
                                elements=[
                                    cst.Element(
                                        value=cst.Name(
                                            value="ValueError",
                                        ),
                                        comma=cst.Comma(
                                            whitespace_after=cst.SimpleWhitespace(" ")
                                        ),
                                    ),
                                    cst.Element(
                                        value=cst.Name(
                                            value="RuntimeError",
                                        ),
                                    ),
                                ],
                                lpar=[],
                                rpar=[],
                            ),
                        )
                    ],
                ),
                "code": "try: pass\nexcept* ValueError, RuntimeError: pass\n",
                "parser": native_parse_statement,
            },
        )
    )
    def test_valid(self, **kwargs: Any) -> None:
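The new test cases above exercise PEP 758 handlers. A small usage sketch, assuming a parser build with Python 3.14 grammar support, shows that the unparenthesized exception list comes back as a Tuple node on the handler.

```python
import libcst as cst
from libcst.helpers import ensure_type

stmt = cst.parse_statement("try: pass\nexcept ValueError, RuntimeError: pass\n")
handler = ensure_type(stmt, cst.Try).handlers[0]
# The handler's `type` field is a Tuple with one Element per exception name.
print(type(handler.type).__name__)                          # Tuple
print(len(ensure_type(handler.type, cst.Tuple).elements))   # 2
```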
@@ -8,7 +8,6 @@ from typing import Any, Callable
import libcst as cst
from libcst import parse_expression, parse_statement
from libcst._nodes.tests.base import CSTNodeTest, parse_expression_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -286,6 +285,6 @@ class TupleTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)
@@ -8,7 +8,6 @@ from typing import Any
import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -132,8 +131,6 @@ class TypeAliasCreationTest(CSTNodeTest):
        )
    )
    def test_valid(self, **kwargs: Any) -> None:
        if not is_native():
            self.skipTest("Disabled in the old parser")
        self.validate_node(**kwargs)


@@ -252,6 +249,4 @@ class TypeAliasParserTest(CSTNodeTest):
        )
    )
    def test_valid(self, **kwargs: Any) -> None:
        if not is_native():
            self.skipTest("Disabled in the old parser")
        self.validate_node(**kwargs)
@@ -7,9 +7,7 @@ from typing import Any

import libcst as cst
from libcst import parse_statement, PartialParserConfig
from libcst._maybe_sentinel import MaybeSentinel
from libcst._nodes.tests.base import CSTNodeTest, DummyIndentedBlock, parse_statement_as
from libcst._parser.entrypoints import is_native
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider

@@ -187,14 +185,14 @@ class WithTest(CSTNodeTest):
                    cst.WithItem(
                        cst.Call(
                            cst.Name("context_mgr"),
                            lpar=() if is_native() else (cst.LeftParen(),),
                            rpar=() if is_native() else (cst.RightParen(),),
                            lpar=(),
                            rpar=(),
                        )
                    ),
                ),
                cst.SimpleStatementSuite((cst.Pass(),)),
                lpar=(cst.LeftParen() if is_native() else MaybeSentinel.DEFAULT),
                rpar=(cst.RightParen() if is_native() else MaybeSentinel.DEFAULT),
                lpar=(cst.LeftParen()),
                rpar=(cst.RightParen()),
                whitespace_after_with=cst.SimpleWhitespace(""),
            ),
            "code": "with(context_mgr()): pass\n",
@@ -233,7 +231,7 @@ class WithTest(CSTNodeTest):
                rpar=cst.RightParen(whitespace_before=cst.SimpleWhitespace(" ")),
            ),
            "code": ("with ( foo(),\n" " bar(), ): pass\n"),  # noqa
            "parser": parse_statement if is_native() else None,
            "parser": parse_statement,
            "expected_position": CodeRange((1, 0), (2, 21)),
        },
    )
@@ -310,7 +308,7 @@ class WithTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)
@@ -8,7 +8,6 @@ from typing import Any, Callable, Optional
import libcst as cst
from libcst import parse_statement
from libcst._nodes.tests.base import CSTNodeTest, parse_statement_as
from libcst._parser.entrypoints import is_native
from libcst.helpers import ensure_type
from libcst.metadata import CodeRange
from libcst.testing.utils import data_provider
@@ -241,6 +240,6 @@ class YieldParsingTest(CSTNodeTest):
        )
    )
    def test_versions(self, **kwargs: Any) -> None:
        if is_native() and not kwargs.get("expect_success", True):
        if not kwargs.get("expect_success", True):
            self.skipTest("parse errors are disabled for native parser")
        self.assert_parses(**kwargs)
@@ -9,7 +9,6 @@ parser. A parser entrypoint should take the source code and some configuration
information
"""

import os
from functools import partial
from typing import Union

@@ -17,19 +16,12 @@ from libcst._nodes.base import CSTNode
from libcst._nodes.expression import BaseExpression
from libcst._nodes.module import Module
from libcst._nodes.statement import BaseCompoundStatement, SimpleStatementLine
from libcst._parser.detect_config import convert_to_utf8, detect_config
from libcst._parser.grammar import get_grammar, validate_grammar
from libcst._parser.python_parser import PythonCSTParser
from libcst._parser.detect_config import convert_to_utf8
from libcst._parser.types.config import PartialParserConfig

_DEFAULT_PARTIAL_PARSER_CONFIG: PartialParserConfig = PartialParserConfig()


def is_native() -> bool:
    typ = os.environ.get("LIBCST_PARSER_TYPE")
    return typ != "pure"


def _parse(
    entrypoint: str,
    source: Union[str, bytes],
@@ -38,57 +30,21 @@ def _parse(
    detect_trailing_newline: bool,
    detect_default_newline: bool,
) -> CSTNode:
    if is_native():
        from libcst.native import parse_expression, parse_module, parse_statement

        encoding, source_str = convert_to_utf8(source, partial=config)
    encoding, source_str = convert_to_utf8(source, partial=config)

        if entrypoint == "file_input":
            parse = partial(parse_module, encoding=encoding)
        elif entrypoint == "stmt_input":
            parse = parse_statement
        elif entrypoint == "expression_input":
            parse = parse_expression
        else:
            raise ValueError(f"Unknown parser entry point: {entrypoint}")
    from libcst import native

        return parse(source_str)
    return _pure_python_parse(
        entrypoint,
        source,
        config,
        detect_trailing_newline=detect_trailing_newline,
        detect_default_newline=detect_default_newline,
    )
    if entrypoint == "file_input":
        parse = partial(native.parse_module, encoding=encoding)
    elif entrypoint == "stmt_input":
        parse = native.parse_statement
    elif entrypoint == "expression_input":
        parse = native.parse_expression
    else:
        raise ValueError(f"Unknown parser entry point: {entrypoint}")


def _pure_python_parse(
    entrypoint: str,
    source: Union[str, bytes],
    config: PartialParserConfig,
    *,
    detect_trailing_newline: bool,
    detect_default_newline: bool,
) -> CSTNode:
    detection_result = detect_config(
        source,
        partial=config,
        detect_trailing_newline=detect_trailing_newline,
        detect_default_newline=detect_default_newline,
    )
    validate_grammar()
    grammar = get_grammar(config.parsed_python_version, config.future_imports)

    parser = PythonCSTParser(
        tokens=detection_result.tokens,
        config=detection_result.config,
        pgen_grammar=grammar,
        start_nonterminal=entrypoint,
    )
    # The parser has an Any return type, we can at least refine it to CSTNode here.
    result = parser.parse()
    assert isinstance(result, CSTNode)
    return result
    return parse(source_str)


def parse_module(
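For orientation, a rough sketch of how the dispatch above is exercised from the public API; per this diff the pure-Python fallback is gone, so every public entry point funnels into the native parser through _parse.

```python
import libcst as cst

# Each helper maps to one entrypoint name in _parse:
# parse_module -> "file_input", parse_statement -> "stmt_input",
# parse_expression -> "expression_input".
print(cst.parse_expression("a + b"))
print(cst.parse_statement("x = 1\n"))
print(cst.parse_module("x = 1\n").code)
```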
@@ -27,7 +27,7 @@ try:
    ERROR_DEDENT: TokenType = native_token_type.ERROR_DEDENT

except ImportError:
    from libcst._parser.parso.python.py_token import (  # noqa F401
    from libcst._parser.parso.python.py_token import (  # noqa: F401
        PythonTokenTypes,
        TokenType,
    )
@@ -10,7 +10,6 @@ from unittest.mock import patch

import libcst as cst
from libcst._nodes.base import CSTValidationError
from libcst._parser.entrypoints import is_native
from libcst.testing.utils import data_provider, UnitTest


@@ -174,8 +173,6 @@ class ParseErrorsTest(UnitTest):
            parse_fn()
        # make sure str() doesn't blow up
        self.assertIn("Syntax Error", str(cm.exception))
        if not is_native():
            self.assertEqual(str(cm.exception), expected)

    def test_native_fallible_into_py(self) -> None:
        with patch("libcst._nodes.expression.Name._validate") as await_validate:
@@ -9,4 +9,4 @@ try:

    Token = tokenize.Token
except ImportError:
    from libcst._parser.types.py_token import Token  # noqa F401
    from libcst._parser.types.py_token import Token  # noqa: F401
@ -25,6 +25,7 @@ if TYPE_CHECKING:
|
|||
BaseExpression,
|
||||
BaseFormattedStringContent,
|
||||
BaseSlice,
|
||||
BaseTemplatedStringContent,
|
||||
BinaryOperation,
|
||||
BooleanOperation,
|
||||
Call,
|
||||
|
|
@ -71,6 +72,9 @@ if TYPE_CHECKING:
|
|||
StarredElement,
|
||||
Subscript,
|
||||
SubscriptElement,
|
||||
TemplatedString,
|
||||
TemplatedStringExpression,
|
||||
TemplatedStringText,
|
||||
Tuple,
|
||||
UnaryOperation,
|
||||
Yield,
|
||||
|
|
@ -5182,6 +5186,140 @@ class CSTTypedBaseFunctions:
|
|||
def leave_SubtractAssign_whitespace_after(self, node: "SubtractAssign") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString(self, node: "TemplatedString") -> Optional[bool]:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString_parts(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString_parts(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString_start(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString_start(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString_end(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString_end(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString_lpar(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString_lpar(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedString_rpar(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString_rpar(self, node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> Optional[bool]:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_conversion(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_conversion(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_format_spec(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_format_spec(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_whitespace_before_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_whitespace_before_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_whitespace_after_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_whitespace_after_expression(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringExpression_equal(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression_equal(
|
||||
self, node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringText(self, node: "TemplatedStringText") -> Optional[bool]:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TemplatedStringText_value(self, node: "TemplatedStringText") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringText_value(self, node: "TemplatedStringText") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def visit_TrailingWhitespace(self, node: "TrailingWhitespace") -> Optional[bool]:
|
||||
pass
|
||||
|
|
@ -6385,6 +6523,20 @@ class CSTTypedVisitorFunctions(CSTTypedBaseFunctions):
|
|||
def leave_SubtractAssign(self, original_node: "SubtractAssign") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedString(self, original_node: "TemplatedString") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringExpression(
|
||||
self, original_node: "TemplatedStringExpression"
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TemplatedStringText(self, original_node: "TemplatedStringText") -> None:
|
||||
pass
|
||||
|
||||
@mark_no_op
|
||||
def leave_TrailingWhitespace(self, original_node: "TrailingWhitespace") -> None:
|
||||
pass
|
||||
|
|
@@ -7402,6 +7554,34 @@ class CSTTypedTransformerFunctions(CSTTypedBaseFunctions):
     ) -> "BaseAugOp":
         return updated_node
 
+    @mark_no_op
+    def leave_TemplatedString(
+        self, original_node: "TemplatedString", updated_node: "TemplatedString"
+    ) -> "BaseExpression":
+        return updated_node
+
+    @mark_no_op
+    def leave_TemplatedStringExpression(
+        self,
+        original_node: "TemplatedStringExpression",
+        updated_node: "TemplatedStringExpression",
+    ) -> Union[
+        "BaseTemplatedStringContent",
+        FlattenSentinel["BaseTemplatedStringContent"],
+        RemovalSentinel,
+    ]:
+        return updated_node
+
+    @mark_no_op
+    def leave_TemplatedStringText(
+        self, original_node: "TemplatedStringText", updated_node: "TemplatedStringText"
+    ) -> Union[
+        "BaseTemplatedStringContent",
+        FlattenSentinel["BaseTemplatedStringContent"],
+        RemovalSentinel,
+    ]:
+        return updated_node
+
     @mark_no_op
     def leave_TrailingWhitespace(
         self, original_node: "TrailingWhitespace", updated_node: "TrailingWhitespace"
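The transformer stubs above fix the contract for the new t-string nodes: leave_TemplatedStringExpression and leave_TemplatedStringText may return a replacement node, a FlattenSentinel, or a RemovalSentinel, while leave_TemplatedString must return a BaseExpression. A minimal sketch of a transformer written against that contract (the node and attribute names come from this diff; the transformation itself, upper-casing literal t-string text, is purely illustrative):

import libcst as cst


class UppercaseTemplatedText(cst.CSTTransformer):
    """Illustrative only: upper-case the literal text parts of t-strings."""

    def leave_TemplatedStringText(
        self,
        original_node: cst.TemplatedStringText,
        updated_node: cst.TemplatedStringText,
    ) -> cst.TemplatedStringText:
        # TemplatedStringText.value holds the raw text between interpolations.
        return updated_node.with_changes(value=updated_node.value.upper())
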
@@ -3,12 +3,14 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 #
+from __future__ import annotations
+
 import argparse
 import inspect
 from abc import ABC, abstractmethod
-from typing import Dict, Generator, List, Type, TypeVar
+from typing import Dict, Generator, List, Tuple, Type, TypeVar
 
-from libcst import Module
+from libcst import CSTNode, Module
 from libcst.codemod._codemod import Codemod
 from libcst.codemod._context import CodemodContext
 from libcst.codemod._visitor import ContextAwareTransformer
@@ -65,6 +67,28 @@ class CodemodCommand(Codemod, ABC):
         """
         ...
 
+    # Lightweight wrappers for RemoveImportsVisitor static functions
+    def remove_unused_import(
+        self,
+        module: str,
+        obj: str | None = None,
+        asname: str | None = None,
+    ) -> None:
+        RemoveImportsVisitor.remove_unused_import(self.context, module, obj, asname)
+
+    def remove_unused_import_by_node(self, node: CSTNode) -> None:
+        RemoveImportsVisitor.remove_unused_import_by_node(self.context, node)
+
+    # Lightweight wrappers for AddImportsVisitor static functions
+    def add_needed_import(
+        self,
+        module: str,
+        obj: str | None = None,
+        asname: str | None = None,
+        relative: int = 0,
+    ) -> None:
+        AddImportsVisitor.add_needed_import(self.context, module, obj, asname, relative)
+
     def transform_module(self, tree: Module) -> Module:
         # Overrides (but then calls) Codemod's transform_module to provide
         # a spot where additional supported transforms can be attached and run.
@@ -75,13 +99,13 @@ class CodemodCommand(Codemod, ABC):
         # have a static method that other transforms can use which takes
         # a context and other optional args and modifies its own context key
         # accordingly. We import them here so that we don't have circular imports.
-        supported_transforms: Dict[str, Type[Codemod]] = {
-            AddImportsVisitor.CONTEXT_KEY: AddImportsVisitor,
-            RemoveImportsVisitor.CONTEXT_KEY: RemoveImportsVisitor,
-        }
+        supported_transforms: List[Tuple[str, Type[Codemod]]] = [
+            (AddImportsVisitor.CONTEXT_KEY, AddImportsVisitor),
+            (RemoveImportsVisitor.CONTEXT_KEY, RemoveImportsVisitor),
+        ]
 
         # For any visitors that we support auto-running, run them here if needed.
-        for key, transform in supported_transforms.items():
+        for key, transform in supported_transforms:
             if key in self.context.scratch:
                 # We have work to do, so lets run this.
                 tree = self._instantiate_and_run(transform, tree)
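The new wrappers only forward to the existing AddImportsVisitor and RemoveImportsVisitor static helpers, so subclasses can drop the explicit self.context plumbing. A minimal sketch contrasting the two call styles inside a command (the command class and import names are illustrative):

import libcst as cst
from libcst.codemod import VisitorBasedCodemodCommand
from libcst.codemod.visitors import AddImportsVisitor


class UseOptionalCommand(VisitorBasedCodemodCommand):
    def visit_Module(self, node: cst.Module) -> None:
        # Before this diff: call the visitor's static helper with the context.
        AddImportsVisitor.add_needed_import(self.context, "typing", "Optional")
        # With this diff: the command forwards the same arguments for you.
        self.add_needed_import("typing", "Optional")
        self.remove_unused_import("typing", "Any")
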
@@ -12,7 +12,6 @@ import tempfile
 from pathlib import Path
 from unittest import skipIf
 
-from libcst._parser.entrypoints import is_native
 from libcst.codemod import CodemodTest
 from libcst.testing.utils import UnitTest
 
@@ -37,16 +36,10 @@ class TestCodemodCLI(UnitTest):
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
-        if not is_native():
-            self.assertIn(
-                "ParserSyntaxError: Syntax Error @ 14:11.",
-                rlt.stderr.decode("utf-8"),
-            )
-        else:
-            self.assertIn(
-                "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:",
-                rlt.stderr.decode("utf-8"),
-            )
+        self.assertIn(
+            "error: cannot format -: Cannot parse for target version Python 3.6: 13:10: async with AsyncExitStack() as stack:",
+            rlt.stderr.decode("utf-8"),
+        )
 
     def test_codemod_external(self) -> None:
         # Test running the NOOP command as an "external command"
325 libcst/codemod/tests/test_command_helpers.py Normal file
@ -0,0 +1,325 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
#
|
||||
# This source code is licensed under the MIT license found in the
|
||||
# LICENSE file in the root directory of this source tree.
|
||||
#
|
||||
from typing import Union
|
||||
|
||||
import libcst as cst
|
||||
from libcst.codemod import CodemodTest, VisitorBasedCodemodCommand
|
||||
|
||||
|
||||
class TestRemoveUnusedImportHelper(CodemodTest):
|
||||
"""Tests for the remove_unused_import helper method in CodemodCommand."""
|
||||
|
||||
def test_remove_unused_import_simple(self) -> None:
|
||||
"""
|
||||
Test that remove_unused_import helper method works correctly.
|
||||
"""
|
||||
|
||||
class RemoveBarImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule removal
|
||||
self.remove_unused_import("bar")
|
||||
|
||||
before = """
|
||||
import bar
|
||||
import baz
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
import baz
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = RemoveBarImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_remove_unused_import_from_simple(self) -> None:
|
||||
"""
|
||||
Test that remove_unused_import helper method works correctly with from imports.
|
||||
"""
|
||||
|
||||
class RemoveBarFromImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule removal
|
||||
self.remove_unused_import("a.b.c", "bar")
|
||||
|
||||
before = """
|
||||
from a.b.c import bar, baz
|
||||
|
||||
def foo() -> None:
|
||||
baz()
|
||||
"""
|
||||
after = """
|
||||
from a.b.c import baz
|
||||
|
||||
def foo() -> None:
|
||||
baz()
|
||||
"""
|
||||
|
||||
self.TRANSFORM = RemoveBarFromImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_remove_unused_import_with_alias(self) -> None:
|
||||
"""
|
||||
Test that remove_unused_import helper method works correctly with aliased imports.
|
||||
"""
|
||||
|
||||
class RemoveBarAsQuxImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule removal
|
||||
self.remove_unused_import("a.b.c", "bar", "qux")
|
||||
|
||||
before = """
|
||||
from a.b.c import bar as qux, baz
|
||||
|
||||
def foo() -> None:
|
||||
baz()
|
||||
"""
|
||||
after = """
|
||||
from a.b.c import baz
|
||||
|
||||
def foo() -> None:
|
||||
baz()
|
||||
"""
|
||||
|
||||
self.TRANSFORM = RemoveBarAsQuxImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
|
||||
class TestRemoveUnusedImportByNodeHelper(CodemodTest):
|
||||
"""Tests for the remove_unused_import_by_node helper method in CodemodCommand."""
|
||||
|
||||
def test_remove_unused_import_by_node_simple(self) -> None:
|
||||
"""
|
||||
Test that remove_unused_import_by_node helper method works correctly.
|
||||
"""
|
||||
|
||||
class RemoveBarCallAndImport(VisitorBasedCodemodCommand):
|
||||
METADATA_DEPENDENCIES = (
|
||||
cst.metadata.QualifiedNameProvider,
|
||||
cst.metadata.ScopeProvider,
|
||||
)
|
||||
|
||||
def leave_SimpleStatementLine(
|
||||
self,
|
||||
original_node: cst.SimpleStatementLine,
|
||||
updated_node: cst.SimpleStatementLine,
|
||||
) -> Union[cst.RemovalSentinel, cst.SimpleStatementLine]:
|
||||
# Remove any statement that calls bar()
|
||||
if cst.matchers.matches(
|
||||
updated_node,
|
||||
cst.matchers.SimpleStatementLine(
|
||||
body=[cst.matchers.Expr(cst.matchers.Call())]
|
||||
),
|
||||
):
|
||||
call = cst.ensure_type(updated_node.body[0], cst.Expr).value
|
||||
if cst.matchers.matches(
|
||||
call, cst.matchers.Call(func=cst.matchers.Name("bar"))
|
||||
):
|
||||
# Use the helper method to remove imports referenced by this node
|
||||
self.remove_unused_import_by_node(original_node)
|
||||
return cst.RemoveFromParent()
|
||||
return updated_node
|
||||
|
||||
before = """
|
||||
from foo import bar, baz
|
||||
|
||||
def fun() -> None:
|
||||
bar()
|
||||
baz()
|
||||
"""
|
||||
after = """
|
||||
from foo import baz
|
||||
|
||||
def fun() -> None:
|
||||
baz()
|
||||
"""
|
||||
|
||||
self.TRANSFORM = RemoveBarCallAndImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
|
||||
class TestAddNeededImportHelper(CodemodTest):
|
||||
"""Tests for the add_needed_import helper method in CodemodCommand."""
|
||||
|
||||
def test_add_needed_import_simple(self) -> None:
|
||||
"""
|
||||
Test that add_needed_import helper method works correctly.
|
||||
"""
|
||||
|
||||
class AddBarImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule import addition
|
||||
self.add_needed_import("bar")
|
||||
|
||||
before = """
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
import bar
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = AddBarImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_add_needed_import_from_simple(self) -> None:
|
||||
"""
|
||||
Test that add_needed_import helper method works correctly with from imports.
|
||||
"""
|
||||
|
||||
class AddBarFromImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule import addition
|
||||
self.add_needed_import("a.b.c", "bar")
|
||||
|
||||
before = """
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
from a.b.c import bar
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = AddBarFromImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_add_needed_import_with_alias(self) -> None:
|
||||
"""
|
||||
Test that add_needed_import helper method works correctly with aliased imports.
|
||||
"""
|
||||
|
||||
class AddBarAsQuxImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule import addition
|
||||
self.add_needed_import("a.b.c", "bar", "qux")
|
||||
|
||||
before = """
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
from a.b.c import bar as qux
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = AddBarAsQuxImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_add_needed_import_relative(self) -> None:
|
||||
"""
|
||||
Test that add_needed_import helper method works correctly with relative imports.
|
||||
"""
|
||||
|
||||
class AddRelativeImport(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Use the helper method to schedule relative import addition
|
||||
self.add_needed_import("c", "bar", relative=2)
|
||||
|
||||
before = """
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
from ..c import bar
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = AddRelativeImport
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
|
||||
class TestCombinedHelpers(CodemodTest):
|
||||
"""Tests for combining add_needed_import and remove_unused_import helper methods."""
|
||||
|
||||
def test_add_and_remove_imports(self) -> None:
|
||||
"""
|
||||
Test that both helper methods work correctly when used together.
|
||||
"""
|
||||
|
||||
class ReplaceBarWithBaz(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Add new import and remove old one
|
||||
self.add_needed_import("new_module", "baz")
|
||||
self.remove_unused_import("old_module", "bar")
|
||||
|
||||
before = """
|
||||
from other_module import qux
|
||||
from old_module import bar
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
after = """
|
||||
from other_module import qux
|
||||
from new_module import baz
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
self.TRANSFORM = ReplaceBarWithBaz
|
||||
self.assertCodemod(before, after)
|
||||
|
||||
def test_add_and_remove_same_import(self) -> None:
|
||||
"""
|
||||
Test that both helper methods work correctly when used together.
|
||||
"""
|
||||
|
||||
class AddAndRemoveBar(VisitorBasedCodemodCommand):
|
||||
def visit_Module(self, node: cst.Module) -> None:
|
||||
# Add new import and remove old one
|
||||
self.add_needed_import("hello_module", "bar")
|
||||
self.remove_unused_import("hello_module", "bar")
|
||||
|
||||
self.TRANSFORM = AddAndRemoveBar
|
||||
|
||||
before = """
|
||||
from other_module import baz
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
# Should remain unchanged
|
||||
self.assertCodemod(before, before)
|
||||
|
||||
before = """
|
||||
from other_module import baz
|
||||
from hello_module import bar
|
||||
|
||||
def foo() -> None:
|
||||
bar.func()
|
||||
"""
|
||||
self.assertCodemod(before, before)
|
||||
|
||||
before = """
|
||||
from other_module import baz
|
||||
from hello_module import bar
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
|
||||
after = """
|
||||
from other_module import baz
|
||||
|
||||
def foo() -> None:
|
||||
pass
|
||||
"""
|
||||
self.assertCodemod(before, after)
|
||||
|
|
@@ -534,15 +534,20 @@ class _TypeCollectorDequalifier(cst.CSTTransformer):
     def __init__(self, type_collector: "TypeCollector") -> None:
         self.type_collector = type_collector
 
-    def leave_Name(self, original_node: cst.Name, updated_node: cst.Name) -> cst.Name:
+    def leave_Name(
+        self, original_node: cst.Name, updated_node: cst.Name
+    ) -> NameOrAttribute:
         qualified_name = _get_unique_qualified_name(self.type_collector, original_node)
         should_qualify = self.type_collector._handle_qualification_and_should_qualify(
             qualified_name, original_node
         )
         self.type_collector.annotations.names.add(qualified_name)
         if should_qualify:
-            qualified_node = cst.parse_module(qualified_name)
-            return qualified_node  # pyre-ignore[7]
+            parts = qualified_name.split(".")
+            qualified_node = cst.Name(parts[0])
+            for p in parts[1:]:
+                qualified_node = cst.Attribute(qualified_node, cst.Name(p))
+            return qualified_node
         else:
             return original_node
 
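The new leave_Name body avoids cst.parse_module (which returns a Module, hence the old pyre-ignore) and instead folds the dotted path into nested Attribute nodes. The same construction works standalone; a small sketch (the helper name is hypothetical):

import libcst as cst


def dotted_name_to_node(qualified_name: str) -> cst.BaseExpression:
    # "module.submodule.B" -> Attribute(Attribute(Name("module"), Name("submodule")), Name("B"))
    parts = qualified_name.split(".")
    node: cst.BaseExpression = cst.Name(parts[0])
    for part in parts[1:]:
        node = cst.Attribute(value=node, attr=cst.Name(part))
    return node


# The chain renders back to the original dotted path:
assert cst.Module([]).code_for_node(dotted_name_to_node("module.submodule.B")) == "module.submodule.B"
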
@ -61,6 +61,28 @@ class TestApplyAnnotationsVisitor(CodemodTest):
|
|||
)
|
||||
self.assertCodemod(before, after, context_override=context)
|
||||
|
||||
def run_test_case_twice(
|
||||
self,
|
||||
stub: str,
|
||||
before: str,
|
||||
after: str,
|
||||
) -> None:
|
||||
context = CodemodContext()
|
||||
ApplyTypeAnnotationsVisitor.store_stub_in_context(
|
||||
context, parse_module(textwrap.dedent(stub.rstrip()))
|
||||
)
|
||||
r1 = ApplyTypeAnnotationsVisitor(context).transform_module(
|
||||
parse_module(textwrap.dedent(before.rstrip()))
|
||||
)
|
||||
|
||||
context = CodemodContext()
|
||||
ApplyTypeAnnotationsVisitor.store_stub_in_context(
|
||||
context, parse_module(textwrap.dedent(stub.rstrip()))
|
||||
)
|
||||
r2 = ApplyTypeAnnotationsVisitor(context).transform_module(r1)
|
||||
assert r1.code == textwrap.dedent(after.rstrip())
|
||||
assert r2.code == textwrap.dedent(after.rstrip())
|
||||
|
||||
@data_provider(
|
||||
{
|
||||
"simple": (
|
||||
|
|
@ -1965,3 +1987,29 @@ class TestApplyAnnotationsVisitor(CodemodTest):
|
|||
)
|
||||
def test_no_duplicate_annotations(self, stub: str, before: str, after: str) -> None:
|
||||
self.run_simple_test_case(stub=stub, before=before, after=after)
|
||||
|
||||
@data_provider(
|
||||
{
|
||||
"qualifier_jank": (
|
||||
"""
|
||||
from module.submodule import B
|
||||
M: B
|
||||
class Foo: ...
|
||||
""",
|
||||
"""
|
||||
from module import B
|
||||
M = B()
|
||||
class Foo: pass
|
||||
""",
|
||||
"""
|
||||
from module import B
|
||||
import module.submodule
|
||||
|
||||
M: module.submodule.B = B()
|
||||
class Foo: pass
|
||||
""",
|
||||
),
|
||||
}
|
||||
)
|
||||
def test_idempotent(self, stub: str, before: str, after: str) -> None:
|
||||
self.run_test_case_twice(stub=stub, before=before, after=after)
|
||||
|
|
|
|||
|
|
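run_test_case_twice pins down idempotency: applying ApplyTypeAnnotationsVisitor to its own output must not change the code again. The same property can be asserted for any codemod; a generic sketch (the helper is hypothetical):

from typing import Type

import libcst as cst
from libcst.codemod import Codemod, CodemodContext


def assert_idempotent(codemod_cls: Type[Codemod], source: str) -> None:
    # Run the codemod twice; the second pass must be a no-op.
    first = codemod_cls(CodemodContext()).transform_module(cst.parse_module(source))
    second = codemod_cls(CodemodContext()).transform_module(first)
    assert first.code == second.code
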
@@ -142,6 +142,10 @@ class BaseSuite(_NodeABC):
     pass
 
 
+class BaseTemplatedStringContent(_NodeABC):
+    pass
+
+
 class BaseUnaryOp(_NodeABC):
     pass
 
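BaseTemplatedStringContent is the matcher-side counterpart of the new CST base class, and the generated TemplatedString* matchers in the following hunk build on it. A hedged sketch of how they could be used, assuming the installed libcst build already parses PEP 750 t-strings (the snippet being parsed is illustrative):

import libcst as cst
import libcst.matchers as m

# Assumes the installed build parses t-string literals; otherwise parse_module raises.
module = cst.parse_module('greeting = t"hello {name}"\n')

# Find every interpolated expression inside a t-string.
for expr in m.findall(module, m.TemplatedStringExpression()):
    print(cst.Module([]).code_for_node(expr.expression))
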
@ -14283,6 +14287,375 @@ class SubtractAssign(BaseAugOp, BaseMatcherNode):
|
|||
] = DoNotCare()
|
||||
|
||||
|
||||
BaseTemplatedStringContentMatchType = Union[
|
||||
"BaseTemplatedStringContent",
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[cst.BaseTemplatedStringContent],
|
||||
]
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, unsafe_hash=False)
|
||||
class TemplatedString(BaseExpression, BaseString, BaseMatcherNode):
|
||||
parts: Union[
|
||||
Sequence[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
DoNotCareSentinel,
|
||||
MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]],
|
||||
OneOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
BaseTemplatedStringContentMatchType,
|
||||
OneOf[BaseTemplatedStringContentMatchType],
|
||||
AllOf[BaseTemplatedStringContentMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.BaseTemplatedStringContent]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
start: Union[
|
||||
strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
|
||||
] = DoNotCare()
|
||||
end: Union[
|
||||
Literal['"', "'", '"""', "'''"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Literal['"', "'", '"""', "'''"]],
|
||||
DoNotCareSentinel,
|
||||
OneOf[
|
||||
Union[
|
||||
Literal['"', "'", '"""', "'''"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Literal['"', "'", '"""', "'''"]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Literal['"', "'", '"""', "'''"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Literal['"', "'", '"""', "'''"]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
lpar: Union[
|
||||
Sequence[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
DoNotCareSentinel,
|
||||
MatchIfTrue[Sequence[cst.LeftParen]],
|
||||
OneOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.LeftParen]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
LeftParenMatchType,
|
||||
OneOf[LeftParenMatchType],
|
||||
AllOf[LeftParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.LeftParen]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
rpar: Union[
|
||||
Sequence[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
DoNotCareSentinel,
|
||||
MatchIfTrue[Sequence[cst.RightParen]],
|
||||
OneOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.RightParen]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Sequence[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
AtLeastN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
AtMostN[
|
||||
Union[
|
||||
RightParenMatchType,
|
||||
OneOf[RightParenMatchType],
|
||||
AllOf[RightParenMatchType],
|
||||
]
|
||||
],
|
||||
]
|
||||
],
|
||||
MatchIfTrue[Sequence[cst.RightParen]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
metadata: Union[
|
||||
MetadataMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[MetadataMatchType],
|
||||
AllOf[MetadataMatchType],
|
||||
] = DoNotCare()
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, unsafe_hash=False)
|
||||
class TemplatedStringExpression(BaseTemplatedStringContent, BaseMatcherNode):
|
||||
expression: Union[
|
||||
BaseExpressionMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseExpressionMatchType],
|
||||
AllOf[BaseExpressionMatchType],
|
||||
] = DoNotCare()
|
||||
conversion: Union[
|
||||
Optional[str],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[str]],
|
||||
DoNotCareSentinel,
|
||||
OneOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
|
||||
AllOf[Union[Optional[str], MetadataMatchType, MatchIfTrue[Optional[str]]]],
|
||||
] = DoNotCare()
|
||||
format_spec: Union[
|
||||
Optional[Sequence["BaseTemplatedStringContent"]],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]],
|
||||
DoNotCareSentinel,
|
||||
OneOf[
|
||||
Union[
|
||||
Optional[Sequence["BaseTemplatedStringContent"]],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Optional[Sequence["BaseTemplatedStringContent"]],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[Sequence[cst.BaseTemplatedStringContent]]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
whitespace_before_expression: Union[
|
||||
BaseParenthesizableWhitespaceMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseParenthesizableWhitespaceMatchType],
|
||||
AllOf[BaseParenthesizableWhitespaceMatchType],
|
||||
] = DoNotCare()
|
||||
whitespace_after_expression: Union[
|
||||
BaseParenthesizableWhitespaceMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[BaseParenthesizableWhitespaceMatchType],
|
||||
AllOf[BaseParenthesizableWhitespaceMatchType],
|
||||
] = DoNotCare()
|
||||
equal: Union[
|
||||
Optional["AssignEqual"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[cst.AssignEqual]],
|
||||
DoNotCareSentinel,
|
||||
OneOf[
|
||||
Union[
|
||||
Optional["AssignEqual"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[cst.AssignEqual]],
|
||||
]
|
||||
],
|
||||
AllOf[
|
||||
Union[
|
||||
Optional["AssignEqual"],
|
||||
MetadataMatchType,
|
||||
MatchIfTrue[Optional[cst.AssignEqual]],
|
||||
]
|
||||
],
|
||||
] = DoNotCare()
|
||||
metadata: Union[
|
||||
MetadataMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[MetadataMatchType],
|
||||
AllOf[MetadataMatchType],
|
||||
] = DoNotCare()
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, unsafe_hash=False)
|
||||
class TemplatedStringText(BaseTemplatedStringContent, BaseMatcherNode):
|
||||
value: Union[
|
||||
strMatchType, DoNotCareSentinel, OneOf[strMatchType], AllOf[strMatchType]
|
||||
] = DoNotCare()
|
||||
metadata: Union[
|
||||
MetadataMatchType,
|
||||
DoNotCareSentinel,
|
||||
OneOf[MetadataMatchType],
|
||||
AllOf[MetadataMatchType],
|
||||
] = DoNotCare()
|
||||
|
||||
|
||||
@dataclass(frozen=True, eq=False, unsafe_hash=False)
|
||||
class TrailingWhitespace(BaseMatcherNode):
|
||||
whitespace: Union[
|
||||
|
|
@ -16122,6 +16495,7 @@ __all__ = [
|
|||
"BaseStatement",
|
||||
"BaseString",
|
||||
"BaseSuite",
|
||||
"BaseTemplatedStringContent",
|
||||
"BaseUnaryOp",
|
||||
"BinaryOperation",
|
||||
"BitAnd",
|
||||
|
|
@ -16274,6 +16648,9 @@ __all__ = [
|
|||
"SubscriptElement",
|
||||
"Subtract",
|
||||
"SubtractAssign",
|
||||
"TemplatedString",
|
||||
"TemplatedStringExpression",
|
||||
"TemplatedStringText",
|
||||
"TrailingWhitespace",
|
||||
"Try",
|
||||
"TryStar",
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ from libcst._nodes.expression import (
|
|||
BaseExpression,
|
||||
BaseFormattedStringContent,
|
||||
BaseSlice,
|
||||
BaseTemplatedStringContent,
|
||||
BinaryOperation,
|
||||
BooleanOperation,
|
||||
Call,
|
||||
|
|
@ -66,6 +67,9 @@ from libcst._nodes.expression import (
|
|||
StarredElement,
|
||||
Subscript,
|
||||
SubscriptElement,
|
||||
TemplatedString,
|
||||
TemplatedStringExpression,
|
||||
TemplatedStringText,
|
||||
Tuple,
|
||||
UnaryOperation,
|
||||
Yield,
|
||||
|
|
@ -358,6 +362,9 @@ TYPED_FUNCTION_RETURN_MAPPING: TypingDict[Type[CSTNode], object] = {
|
|||
SubscriptElement: Union[SubscriptElement, RemovalSentinel],
|
||||
Subtract: BaseBinaryOp,
|
||||
SubtractAssign: BaseAugOp,
|
||||
TemplatedString: BaseExpression,
|
||||
TemplatedStringExpression: Union[BaseTemplatedStringContent, RemovalSentinel],
|
||||
TemplatedStringText: Union[BaseTemplatedStringContent, RemovalSentinel],
|
||||
TrailingWhitespace: TrailingWhitespace,
|
||||
Try: Union[BaseStatement, RemovalSentinel],
|
||||
TryStar: Union[BaseStatement, RemovalSentinel],
|
||||
|
|
|
|||
|
|
@ -83,6 +83,53 @@ class PositionProviderTest(UnitTest):
|
|||
wrapper = MetadataWrapper(parse_module("pass"))
|
||||
wrapper.visit_batched([ABatchable()])
|
||||
|
||||
def test_match_statement_position_metadata(self) -> None:
|
||||
test = self
|
||||
|
||||
class MatchPositionVisitor(CSTVisitor):
|
||||
METADATA_DEPENDENCIES = (PositionProvider,)
|
||||
|
||||
def visit_Match(self, node: cst.Match) -> None:
|
||||
test.assertEqual(
|
||||
self.get_metadata(PositionProvider, node),
|
||||
CodeRange((2, 0), (5, 16)),
|
||||
)
|
||||
|
||||
def visit_MatchCase(self, node: cst.MatchCase) -> None:
|
||||
if (
|
||||
isinstance(node.pattern, cst.MatchAs)
|
||||
and node.pattern.name
|
||||
and node.pattern.name.value == "b"
|
||||
):
|
||||
test.assertEqual(
|
||||
self.get_metadata(PositionProvider, node),
|
||||
CodeRange((3, 4), (3, 16)),
|
||||
)
|
||||
elif (
|
||||
isinstance(node.pattern, cst.MatchAs)
|
||||
and node.pattern.name
|
||||
and node.pattern.name.value == "c"
|
||||
):
|
||||
test.assertEqual(
|
||||
self.get_metadata(PositionProvider, node),
|
||||
CodeRange((4, 4), (4, 16)),
|
||||
)
|
||||
elif isinstance(node.pattern, cst.MatchAs) and not node.pattern.name:
|
||||
test.assertEqual(
|
||||
self.get_metadata(PositionProvider, node),
|
||||
CodeRange((5, 4), (5, 16)),
|
||||
)
|
||||
|
||||
code = """
|
||||
match status:
|
||||
case b: pass
|
||||
case c: pass
|
||||
case _: pass
|
||||
"""
|
||||
|
||||
wrapper = MetadataWrapper(parse_module(code))
|
||||
wrapper.visit(MatchPositionVisitor())
|
||||
|
||||
|
||||
class PositionProvidingCodegenStateTest(UnitTest):
|
||||
def test_codegen_initial_position(self) -> None:
|
||||
|
|
|
|||
|
|
@ -11,7 +11,6 @@ from unittest import mock
|
|||
|
||||
import libcst as cst
|
||||
from libcst import ensure_type
|
||||
from libcst._parser.entrypoints import is_native
|
||||
from libcst.metadata import MetadataWrapper
|
||||
from libcst.metadata.scope_provider import (
|
||||
_gen_dotted_names,
|
||||
|
|
@ -2029,8 +2028,6 @@ class ScopeProviderTest(UnitTest):
|
|||
)
|
||||
|
||||
def test_type_alias_scope(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type aliases are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
type A = C
|
||||
|
|
@ -2052,8 +2049,6 @@ class ScopeProviderTest(UnitTest):
|
|||
self.assertIsInstance(scopes[alias.value], AnnotationScope)
|
||||
|
||||
def test_type_alias_param(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
B = int
|
||||
|
|
@ -2084,8 +2079,6 @@ class ScopeProviderTest(UnitTest):
|
|||
)
|
||||
|
||||
def test_type_alias_tuple_and_paramspec(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
type A[*T] = T
|
||||
|
|
@ -2113,8 +2106,6 @@ class ScopeProviderTest(UnitTest):
|
|||
self.assertEqual(t_refs[0].node, alias_paramspec.value)
|
||||
|
||||
def test_class_type_params(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
class W[T]:
|
||||
|
|
@ -2149,8 +2140,6 @@ class ScopeProviderTest(UnitTest):
|
|||
self.assertEqual(t_refs_in_g[0].node, g.returns.annotation)
|
||||
|
||||
def test_nested_class_type_params(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
class Outer:
|
||||
|
|
@ -2168,8 +2157,6 @@ class ScopeProviderTest(UnitTest):
|
|||
)
|
||||
|
||||
def test_annotation_refers_to_nested_class(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
class Outer:
|
||||
|
|
@ -2229,8 +2216,6 @@ class ScopeProviderTest(UnitTest):
|
|||
)
|
||||
|
||||
def test_body_isnt_subject_to_special_annotation_rule(self) -> None:
|
||||
if not is_native():
|
||||
self.skipTest("type parameters are only supported in the native parser")
|
||||
m, scopes = get_scope_metadata_provider(
|
||||
"""
|
||||
class Outer:
|
||||
|
|
|
|||
|
|
@@ -5,11 +5,6 @@
 
 from unittest import main
 
-from libcst._parser.entrypoints import is_native
-
 
 if __name__ == "__main__":
-    parser_type = "native" if is_native() else "pure"
-    print(f"running tests with {parser_type!r} parser")
-
     main(module=None, verbosity=2)
12 libcst/tests/test_import.py Normal file
@@ -0,0 +1,12 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+
+from unittest import TestCase
+
+
+class TestImport(TestCase):
+    def test_import_libcst(self) -> None:
+        import libcst  # noqa: F401
@@ -8,7 +8,7 @@ from pathlib import Path
 from unittest import TestCase
 
 from libcst import CSTTransformer, parse_module
-from libcst._parser.entrypoints import is_native
 
 
 fixtures: Path = Path(__file__).parent.parent.parent / "native/libcst/tests/fixtures"
 
@@ -19,8 +19,6 @@ class NOOPTransformer(CSTTransformer):
 
 class RoundTripTests(TestCase):
     def _get_fixtures(self) -> list[Path]:
-        if not is_native():
-            self.skipTest("pure python parser doesn't work with this")
         self.assertTrue(fixtures.exists(), f"{fixtures} should exist")
         files = list(fixtures.iterdir())
         self.assertGreater(len(files), 0)
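The round-trip tests lean on LibCST's core guarantee: parsing a fixture and regenerating code from the untouched tree reproduces the input exactly, even through a no-op transformer. The same check in isolation:

import libcst as cst

source = "x = [1, 2,  3]  # spacing and comments survive\n"
module = cst.parse_module(source)

# Lossless round-trip: no edits applied, byte-identical output.
assert module.code == source
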
41 native/Cargo.lock generated
|
@ -286,7 +286,7 @@ checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b"
|
|||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "1.8.2"
|
||||
version = "1.8.6"
|
||||
dependencies = [
|
||||
"annotate-snippets",
|
||||
"criterion",
|
||||
|
|
@ -304,7 +304,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "1.8.2"
|
||||
version = "1.8.6"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
|
|
@ -355,9 +355,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.16.0"
|
||||
version = "1.21.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860"
|
||||
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
|
||||
|
||||
[[package]]
|
||||
name = "oorandom"
|
||||
|
|
@ -443,9 +443,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "pyo3"
|
||||
version = "0.25.0"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f239d656363bcee73afef85277f1b281e8ac6212a1d42aa90e55b90ed43c47a4"
|
||||
checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383"
|
||||
dependencies = [
|
||||
"indoc",
|
||||
"libc",
|
||||
|
|
@ -460,19 +460,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "pyo3-build-config"
|
||||
version = "0.25.0"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "755ea671a1c34044fa165247aaf6f419ca39caa6003aee791a0df2713d8f1b6d"
|
||||
checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"target-lexicon",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-ffi"
|
||||
version = "0.25.0"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc95a2e67091e44791d4ea300ff744be5293f394f1bafd9f78c080814d35956e"
|
||||
checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"pyo3-build-config",
|
||||
|
|
@ -480,9 +479,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "pyo3-macros"
|
||||
version = "0.25.0"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a179641d1b93920829a62f15e87c0ed791b6c8db2271ba0fd7c2686090510214"
|
||||
checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"pyo3-macros-backend",
|
||||
|
|
@ -492,9 +491,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "pyo3-macros-backend"
|
||||
version = "0.25.0"
|
||||
version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dff85ebcaab8c441b0e3f7ae40a6963ecea8a9f5e74f647e33fcf5ec9a1e89e"
|
||||
checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
|
|
@ -514,9 +513,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rayon"
|
||||
version = "1.10.0"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
|
||||
checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
|
||||
dependencies = [
|
||||
"either",
|
||||
"rayon-core",
|
||||
|
|
@ -524,9 +523,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "rayon-core"
|
||||
version = "1.12.1"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
|
||||
checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
|
||||
dependencies = [
|
||||
"crossbeam-deque",
|
||||
"crossbeam-utils",
|
||||
|
|
@ -534,9 +533,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
version = "1.11.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
|
|
|
|||
|
|
@@ -5,7 +5,7 @@
 
 [package]
 name = "libcst"
-version = "1.8.2"
+version = "1.8.6"
 authors = ["LibCST Developers"]
 edition = "2018"
 rust-version = "1.70"
@@ -36,18 +36,18 @@ trace = ["peg/trace"]
 
 [dependencies]
 paste = "1.0.15"
-pyo3 = { version = "0.25", optional = true }
+pyo3 = { version = "0.26", optional = true }
 thiserror = "2.0.12"
 peg = "0.8.5"
 annotate-snippets = "0.11.5"
-regex = "1.11.1"
+regex = "1.11.2"
 memchr = "2.7.4"
-libcst_derive = { path = "../libcst_derive", version = "1.8.2" }
+libcst_derive = { path = "../libcst_derive", version = "1.8.6" }
 
 [dev-dependencies]
 criterion = { version = "0.6.0", features = ["html_reports"] }
 difference = "2.0.0"
-rayon = "1.10.0"
+rayon = "1.11.0"
 itertools = "0.14.0"
 
 [[bench]]
|
|
@@ -25,7 +25,7 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>> {
         text,
         &TokConfig {
             async_hacks: false,
-            split_fstring: true,
+            split_ftstring: true,
         },
     );
 
@@ -191,4 +191,23 @@ mod test {
         assert_eq!(11, bol_offset("hello\nhello", 3));
         assert_eq!(12, bol_offset("hello\nhello\nhello", 3));
     }
+    #[test]
+    fn test_tstring_basic() {
+        assert!(
+            parse_module("t'hello'", None).is_ok(),
+            "Failed to parse t'hello'"
+        );
+        assert!(
+            parse_module("t'{hello}'", None).is_ok(),
+            "Failed to parse t'{{hello}}'"
+        );
+        assert!(
+            parse_module("t'{hello:r}'", None).is_ok(),
+            "Failed to parse t'{{hello:r}}'"
+        );
+        assert!(
+            parse_module("t'line1\\n{hello:r}\\nline2'", None).is_ok(),
+            "Failed to parse t'line1\\n{{hello:r}}\\nline2'"
+        );
+    }
 }
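The Rust tests above only assert that t-string source tokenizes and parses. On the Python side the same inputs should round-trip through the CST unchanged once a build with this support is installed; a hedged sketch (t-string literals need a Python 3.14-level grammar, so parser support is an assumption about the target build):

import libcst as cst

for source in ("t'hello'\n", "t'{hello}'\n", "t'{hello:r}'\n"):
    # Parse and regenerate; LibCST guarantees byte-for-byte round-trips.
    assert cst.parse_module(source).code == source
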
|||
|
|
@@ -474,6 +474,7 @@ pub enum Expression<'a> {
     SimpleString(Box<SimpleString<'a>>),
     ConcatenatedString(Box<ConcatenatedString<'a>>),
     FormattedString(Box<FormattedString<'a>>),
+    TemplatedString(Box<TemplatedString<'a>>),
     NamedExpr(Box<NamedExpr<'a>>),
 }
 
@@ -2249,6 +2250,7 @@ pub enum String<'a> {
     Simple(SimpleString<'a>),
     Concatenated(ConcatenatedString<'a>),
     Formatted(FormattedString<'a>),
+    Templated(TemplatedString<'a>),
 }
 
 impl<'r, 'a> std::convert::From<DeflatedString<'r, 'a>> for DeflatedExpression<'r, 'a> {
@@ -2257,6 +2259,7 @@ impl<'r, 'a> std::convert::From<DeflatedString<'r, 'a>> for DeflatedExpression<'
             DeflatedString::Simple(s) => Self::SimpleString(Box::new(s)),
             DeflatedString::Concatenated(s) => Self::ConcatenatedString(Box::new(s)),
             DeflatedString::Formatted(s) => Self::FormattedString(Box::new(s)),
+            DeflatedString::Templated(s) => Self::TemplatedString(Box::new(s)),
         }
     }
 }
|
|
@ -2334,6 +2337,142 @@ impl<'a> Codegen<'a> for SimpleString<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cst_node]
|
||||
pub struct TemplatedStringText<'a> {
|
||||
pub value: &'a str,
|
||||
}
|
||||
|
||||
impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringText<'r, 'a> {
|
||||
type Inflated = TemplatedStringText<'a>;
|
||||
fn inflate(self, _config: &Config<'a>) -> Result<Self::Inflated> {
|
||||
Ok(Self::Inflated { value: self.value })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Codegen<'a> for TemplatedStringText<'a> {
|
||||
fn codegen(&self, state: &mut CodegenState<'a>) {
|
||||
state.add_token(self.value);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn make_tstringtext<'r, 'a>(value: &'a str) -> DeflatedTemplatedStringText<'r, 'a> {
|
||||
DeflatedTemplatedStringText {
|
||||
value,
|
||||
_phantom: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cst_node]
|
||||
pub struct TemplatedStringExpression<'a> {
|
||||
// This represents the part of a t-string that is inside the brackets '{' and '}'.
|
||||
pub expression: Expression<'a>,
|
||||
pub conversion: Option<&'a str>,
|
||||
pub format_spec: Option<Vec<TemplatedStringContent<'a>>>,
|
||||
pub whitespace_before_expression: ParenthesizableWhitespace<'a>,
|
||||
pub whitespace_after_expression: ParenthesizableWhitespace<'a>,
|
||||
pub equal: Option<AssignEqual<'a>>,
|
||||
|
||||
pub(crate) lbrace_tok: TokenRef<'a>,
|
||||
// This is None if there's an equal sign, otherwise it's the first token of
|
||||
// (conversion, format spec, right brace) in that order
|
||||
pub(crate) after_expr_tok: Option<TokenRef<'a>>,
|
||||
}
|
||||
|
||||
impl<'r, 'a> Inflate<'a> for DeflatedTemplatedStringExpression<'r, 'a> {
|
||||
type Inflated = TemplatedStringExpression<'a>;
|
||||
fn inflate(mut self, config: &Config<'a>) -> Result<Self::Inflated> {
|
||||
let whitespace_before_expression = parse_parenthesizable_whitespace(
|
||||
config,
|
||||
&mut (*self.lbrace_tok).whitespace_after.borrow_mut(),
|
||||
)?;
|
||||
let expression = self.expression.inflate(config)?;
|
||||
let equal = self.equal.inflate(config)?;
|
||||
let whitespace_after_expression = if let Some(after_expr_tok) = self.after_expr_tok.as_mut()
|
||||
{
|
||||
parse_parenthesizable_whitespace(
|
||||
config,
|
||||
&mut after_expr_tok.whitespace_before.borrow_mut(),
|
||||
)?
|
||||
} else {
|
||||
Default::default()
|
||||
};
|
||||
let format_spec = self.format_spec.inflate(config)?;
|
||||
Ok(Self::Inflated {
|
||||
expression,
|
||||
conversion: self.conversion,
|
||||
format_spec,
|
||||
whitespace_before_expression,
|
||||
whitespace_after_expression,
|
||||
equal,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Codegen<'a> for TemplatedStringExpression<'a> {
|
||||
fn codegen(&self, state: &mut CodegenState<'a>) {
|
||||
state.add_token("{");
|
||||
self.whitespace_before_expression.codegen(state);
|
||||
self.expression.codegen(state);
|
||||
if let Some(eq) = &self.equal {
|
||||
eq.codegen(state);
|
||||
}
|
||||
self.whitespace_after_expression.codegen(state);
|
||||
if let Some(conv) = &self.conversion {
|
||||
state.add_token("!");
|
||||
state.add_token(conv);
|
||||
}
|
||||
if let Some(specs) = &self.format_spec {
|
||||
state.add_token(":");
|
||||
for spec in specs {
|
||||
spec.codegen(state);
|
||||
}
|
||||
}
|
||||
state.add_token("}");
|
||||
}
|
||||
}
|
||||
|
||||
#[cst_node(ParenthesizedNode)]
|
||||
pub struct TemplatedString<'a> {
|
||||
pub parts: Vec<TemplatedStringContent<'a>>,
|
||||
pub start: &'a str,
|
||||
pub end: &'a str,
|
||||
pub lpar: Vec<LeftParen<'a>>,
|
||||
pub rpar: Vec<RightParen<'a>>,
|
||||
}
|
||||
|
||||
impl<'r, 'a> Inflate<'a> for DeflatedTemplatedString<'r, 'a> {
|
||||
type Inflated = TemplatedString<'a>;
|
||||
fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> {
|
||||
let lpar = self.lpar.inflate(config)?;
|
||||
let parts = self.parts.inflate(config)?;
|
||||
let rpar = self.rpar.inflate(config)?;
|
||||
Ok(Self::Inflated {
|
||||
parts,
|
||||
start: self.start,
|
||||
end: self.end,
|
||||
lpar,
|
||||
rpar,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Codegen<'a> for TemplatedString<'a> {
|
||||
fn codegen(&self, state: &mut CodegenState<'a>) {
|
||||
self.parenthesize(state, |state| {
|
||||
state.add_token(self.start);
|
||||
for part in &self.parts {
|
||||
part.codegen(state);
|
||||
}
|
||||
state.add_token(self.end);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cst_node(Codegen, Inflate)]
|
||||
pub enum TemplatedStringContent<'a> {
|
||||
Text(TemplatedStringText<'a>),
|
||||
Expression(Box<TemplatedStringExpression<'a>>),
|
||||
}
|
||||
#[cst_node]
|
||||
pub struct FormattedStringText<'a> {
|
||||
pub value: &'a str,
|
||||
|
|
@ -2531,8 +2670,8 @@ mod py {
|
|||
use crate::nodes::traits::py::TryIntoPy;
|
||||
|
||||
// TODO: this could be a derive helper attribute to override the python class name
|
||||
impl<'a> TryIntoPy<pyo3::PyObject> for Element<'a> {
|
||||
fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult<pyo3::PyObject> {
|
||||
impl<'a> TryIntoPy<pyo3::Py<pyo3::PyAny>> for Element<'a> {
|
||||
fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult<pyo3::Py<pyo3::PyAny>> {
|
||||
match self {
|
||||
Self::Starred(s) => s.try_into_py(py),
|
||||
Self::Simple { value, comma } => {
|
||||
|
|
@ -2560,8 +2699,8 @@ mod py {
|
|||
}
|
||||
|
||||
// TODO: this could be a derive helper attribute to override the python class name
|
||||
impl<'a> TryIntoPy<pyo3::PyObject> for DictElement<'a> {
|
||||
fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult<pyo3::PyObject> {
|
||||
impl<'a> TryIntoPy<pyo3::Py<pyo3::PyAny>> for DictElement<'a> {
|
||||
fn try_into_py(self, py: pyo3::Python) -> pyo3::PyResult<pyo3::Py<pyo3::PyAny>> {
|
||||
match self {
|
||||
Self::Starred(s) => s.try_into_py(py),
|
||||
Self::Simple {
|
||||
|
|
|
|||
|
|
@ -17,8 +17,8 @@ macro_rules! py_import {
|
|||
( $module_name:expr, $member_name:expr, $getter_fn:ident ) => {
|
||||
paste::paste! {
|
||||
static [<IMPORT_CELL_ $getter_fn:snake:upper>]
|
||||
: pyo3::once_cell::GILOnceCell<pyo3::PyResult<pyo3::PyObject>>
|
||||
= pyo3::once_cell::GILOnceCell::new();
|
||||
: pyo3::once_cell::PyOnceLock<pyo3::PyResult<pyo3::Py<pyo3::PyAny>>>
|
||||
= pyo3::once_cell::PyOnceLock::new();
|
||||
|
||||
fn $getter_fn<'py>(py: pyo3::Python<'py>) -> pyo3::PyResult<&'py pyo3::PyAny> {
|
||||
Ok([<IMPORT_CELL_ $getter_fn:snake:upper>].get_or_init(py, || {
|
||||
|
|
|
|||
|
|
@ -31,7 +31,8 @@ pub use expression::{
|
|||
Integer, Lambda, LeftCurlyBrace, LeftParen, LeftSquareBracket, List, ListComp, Name,
|
||||
NameOrAttribute, NamedExpr, Param, ParamSlash, ParamStar, Parameters, RightCurlyBrace,
|
||||
RightParen, RightSquareBracket, Set, SetComp, SimpleString, Slice, StarArg, StarredDictElement,
|
||||
StarredElement, String, Subscript, SubscriptElement, Tuple, UnaryOperation, Yield, YieldValue,
|
||||
StarredElement, String, Subscript, SubscriptElement, TemplatedString, TemplatedStringContent,
|
||||
TemplatedStringExpression, Tuple, UnaryOperation, Yield, YieldValue,
|
||||
};
|
||||
|
||||
pub(crate) mod op;
|
||||
|
|
@ -78,7 +79,10 @@ pub(crate) mod deflated {
|
|||
DeflatedSlice as Slice, DeflatedStarArg as StarArg,
|
||||
DeflatedStarredDictElement as StarredDictElement, DeflatedStarredElement as StarredElement,
|
||||
DeflatedString as String, DeflatedSubscript as Subscript,
|
||||
DeflatedSubscriptElement as SubscriptElement, DeflatedTuple as Tuple,
|
||||
DeflatedSubscriptElement as SubscriptElement, DeflatedTemplatedString as TemplatedString,
|
||||
DeflatedTemplatedStringContent as TemplatedStringContent,
|
||||
DeflatedTemplatedStringExpression as TemplatedStringExpression,
|
||||
DeflatedTemplatedStringText as TemplatedStringText, DeflatedTuple as Tuple,
|
||||
DeflatedUnaryOperation as UnaryOperation, DeflatedYield as Yield,
|
||||
DeflatedYieldValue as YieldValue,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -29,12 +29,12 @@ impl BaseWhitespaceParserConfig {
|
|||
}
|
||||
|
||||
#[getter]
|
||||
fn get_lines(&self, py: Python) -> PyObject {
|
||||
fn get_lines(&self, py: Python) -> Py<PyAny> {
|
||||
self.lines.to_object(py)
|
||||
}
|
||||
|
||||
#[getter]
|
||||
fn get_default_newline(&self, py: Python) -> PyObject {
|
||||
fn get_default_newline(&self, py: Python) -> Py<PyAny> {
|
||||
self.default_newline.to_object(py)
|
||||
}
|
||||
}
|
||||
|
|
@ -62,23 +62,23 @@ impl BaseWhitespaceParserConfig {
|
|||
}
|
||||
}
|
||||
|
||||
// These fields are private and PyObject, since we don't currently care about using them from
|
||||
// These fields are private and Py<PyAny>, since we don't currently care about using them from
|
||||
// within rust.
|
||||
#[pyclass(extends=BaseWhitespaceParserConfig, module="libcst_native.parser_config")]
|
||||
#[text_signature = "(*, lines, encoding, default_indent, default_newline, has_trailing_newline, version, future_imports)"]
|
||||
pub struct ParserConfig {
|
||||
// lines is inherited
|
||||
#[pyo3(get)]
|
||||
encoding: PyObject,
|
||||
encoding: Py<PyAny>,
|
||||
#[pyo3(get)]
|
||||
default_indent: PyObject,
|
||||
default_indent: Py<PyAny>,
|
||||
// default_newline is inherited
|
||||
#[pyo3(get)]
|
||||
has_trailing_newline: PyObject,
|
||||
has_trailing_newline: Py<PyAny>,
|
||||
#[pyo3(get)]
|
||||
version: PyObject,
|
||||
version: Py<PyAny>,
|
||||
#[pyo3(get)]
|
||||
future_imports: PyObject,
|
||||
future_imports: Py<PyAny>,
|
||||
}
|
||||
|
||||
#[pymethods]
|
||||
|
|
@ -86,12 +86,12 @@ impl ParserConfig {
|
|||
#[new]
|
||||
fn new(
|
||||
lines: &PySequence,
|
||||
encoding: PyObject,
|
||||
default_indent: PyObject,
|
||||
encoding: Py<PyAny>,
|
||||
default_indent: Py<PyAny>,
|
||||
default_newline: &PyString,
|
||||
has_trailing_newline: PyObject,
|
||||
version: PyObject,
|
||||
future_imports: PyObject,
|
||||
has_trailing_newline: Py<PyAny>,
|
||||
version: Py<PyAny>,
|
||||
future_imports: Py<PyAny>,
|
||||
) -> PyResult<(Self, BaseWhitespaceParserConfig)> {
|
||||
Ok((
|
||||
Self {
|
||||
|
|
|
|||
|
|
@ -7,11 +7,11 @@ use pyo3::prelude::*;
|
|||
use std::convert::AsRef;
|
||||
use std::ops::Deref;
|
||||
|
||||
/// An immutable wrapper around a rust type T and it's PyObject equivalent. Caches the conversion
|
||||
/// to and from the PyObject.
|
||||
/// An immutable wrapper around a rust type T and its Py<PyAny> equivalent. Caches the conversion
|
||||
/// to and from the Py<PyAny>.
|
||||
pub struct PyCached<T> {
|
||||
native: T,
|
||||
py_object: PyObject,
|
||||
py_object: Py<PyAny>,
|
||||
}
|
||||
|
||||
impl<T> PyCached<T>
|
||||
|
|
@ -31,7 +31,7 @@ where
|
|||
T: FromPyObject<'source>,
|
||||
{
|
||||
fn extract(ob: &'source PyAny) -> PyResult<Self> {
|
||||
Python::with_gil(|py| {
|
||||
Python::attach(|py| {
|
||||
Ok(PyCached {
|
||||
native: ob.extract()?,
|
||||
py_object: ob.to_object(py),
|
||||
|
|
@ -40,14 +40,14 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
impl<T> IntoPy<PyObject> for PyCached<T> {
|
||||
fn into_py(self, _py: Python) -> PyObject {
|
||||
impl<T> IntoPy<Py<PyAny>> for PyCached<T> {
|
||||
fn into_py(self, _py: Python) -> Py<PyAny> {
|
||||
self.py_object
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ToPyObject for PyCached<T> {
|
||||
fn to_object(&self, py: Python) -> PyObject {
|
||||
fn to_object(&self, py: Python) -> Py<PyAny> {
|
||||
self.py_object.clone_ref(py)
|
||||
}
|
||||
}
|
||||
|
|
@ -71,6 +71,6 @@ where
|
|||
T: ToPyObject,
|
||||
{
|
||||
fn from(val: T) -> Self {
|
||||
Python::with_gil(|py| Self::new(py, val))
|
||||
Python::attach(|py| Self::new(py, val))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -118,7 +118,7 @@ impl<'a, T: Inflate<'a>> Inflate<'a> for Vec<T> {
|
|||
}
|
||||
#[cfg(feature = "py")]
|
||||
pub mod py {
|
||||
use pyo3::{types::PyTuple, IntoPyObjectExt, PyObject, PyResult, Python};
|
||||
use pyo3::{types::PyTuple, IntoPyObjectExt, Py, PyAny, PyResult, Python};
|
||||
|
||||
// TODO: replace with upstream implementation once
|
||||
// https://github.com/PyO3/pyo3/issues/1813 is resolved
|
||||
|
|
@ -133,26 +133,26 @@ pub mod py {
|
|||
// }
|
||||
// }
|
||||
|
||||
impl TryIntoPy<PyObject> for bool {
|
||||
fn try_into_py(self, py: Python) -> PyResult<PyObject> {
|
||||
impl TryIntoPy<Py<PyAny>> for bool {
|
||||
fn try_into_py(self, py: Python) -> PyResult<Py<PyAny>> {
|
||||
self.into_py_any(py)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: TryIntoPy<PyObject>> TryIntoPy<PyObject> for Box<T>
|
||||
impl<T: TryIntoPy<Py<PyAny>>> TryIntoPy<Py<PyAny>> for Box<T>
|
||||
where
|
||||
T: TryIntoPy<PyObject>,
|
||||
T: TryIntoPy<Py<PyAny>>,
|
||||
{
|
||||
fn try_into_py(self, py: Python) -> PyResult<PyObject> {
|
||||
fn try_into_py(self, py: Python) -> PyResult<Py<PyAny>> {
|
||||
(*self).try_into_py(py)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> TryIntoPy<PyObject> for Option<T>
|
||||
impl<T> TryIntoPy<Py<PyAny>> for Option<T>
|
||||
where
|
||||
T: TryIntoPy<PyObject>,
|
||||
T: TryIntoPy<Py<PyAny>>,
|
||||
{
|
||||
fn try_into_py(self, py: Python) -> PyResult<PyObject> {
|
||||
fn try_into_py(self, py: Python) -> PyResult<Py<PyAny>> {
|
||||
Ok(match self {
|
||||
None => py.None(),
|
||||
Some(x) => x.try_into_py(py)?,
|
||||
|
|
@ -160,11 +160,11 @@ pub mod py {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T> TryIntoPy<PyObject> for Vec<T>
|
||||
impl<T> TryIntoPy<Py<PyAny>> for Vec<T>
|
||||
where
|
||||
T: TryIntoPy<PyObject>,
|
||||
T: TryIntoPy<Py<PyAny>>,
|
||||
{
|
||||
fn try_into_py(self, py: Python) -> PyResult<PyObject> {
|
||||
fn try_into_py(self, py: Python) -> PyResult<Py<PyAny>> {
|
||||
let converted = self
|
||||
.into_iter()
|
||||
.map(|x| x.try_into_py(py))
|
||||
|
|
@ -174,8 +174,8 @@ pub mod py {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'a> TryIntoPy<PyObject> for &'a str {
|
||||
fn try_into_py(self, py: Python) -> PyResult<PyObject> {
|
||||
impl<'a> TryIntoPy<Py<PyAny>> for &'a str {
|
||||
fn try_into_py(self, py: Python) -> PyResult<Py<PyAny>> {
|
||||
self.into_py_any(py)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ mod py_error {
|
|||
|
||||
impl<'a> From<ParserError<'a>> for PyErr {
|
||||
fn from(e: ParserError) -> Self {
|
||||
Python::with_gil(|py| {
|
||||
Python::attach(|py| {
|
||||
let lines = match &e {
|
||||
ParserError::TokenizerError(_, text) | ParserError::ParserError(_, text) => {
|
||||
text.lines().collect::<Vec<_>>()
|
||||
|
|
|
|||
|
|
@@ -8,6 +8,7 @@ use std::rc::Rc;
use crate::expression::make_async;
use crate::nodes::deflated::*;
use crate::nodes::expression::make_fstringtext;
+use crate::nodes::expression::make_tstringtext;
use crate::nodes::op::make_importstar;
use crate::nodes::traits::ParenthesizedDeflatedNode;
use crate::parser::ParserError;

@@ -17,7 +18,8 @@ use peg::str::LineCol;
use peg::{parser, Parse, ParseElem, RuleResult};
use TokType::{
Async, Await as AWAIT, Dedent, EndMarker, FStringEnd, FStringStart, FStringString, Indent,
-Name as NameTok, Newline as NL, Number, String as STRING,
+Name as NameTok, Newline as NL, Number, String as STRING, TStringEnd, TStringStart,
+TStringString,
};

pub type Result<'a, T> = std::result::Result<T, ParserError<'a>>;

@@ -552,12 +554,21 @@ parser! {
}

// Except statement

rule except_block() -> ExceptHandler<'input, 'a>
= kw:lit("except") e:expression() a:(k:lit("as") n:name() {(k, n)})?
col:lit(":") b:block() {
make_except(kw, Some(e), a, col, b)
}
+/ kw:lit("except") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())?
+col:lit(":") b:block() {
+let tuple = Expression::Tuple(Box::new(Tuple {
+elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc),
+lpar: vec![],
+rpar: vec![],
+}));
+
+make_except(kw, Some(tuple), None, col, b)
+}
/ kw:lit("except") col:lit(":") b:block() {
make_except(kw, None, None, col, b)
}

@@ -567,6 +578,16 @@ parser! {
a:(k:lit("as") n:name() {(k, n)})? col:lit(":") b:block() {
make_except_star(kw, star, e, a, col, b)
}
+/ kw:lit("except") star:lit("*") e:expression() other:(c:comma() ex:expression() {(c, ex)})+ tc:(c:comma())?
+col:lit(":") b:block() {
+let tuple = Expression::Tuple(Box::new(Tuple {
+elements: comma_separate(expr_to_element(e), other.into_iter().map(|(comma, expr)| (comma, expr_to_element(expr))).collect(), tc),
+lpar: vec![],
+rpar: vec![],
+}));
+
+make_except_star(kw, star, tuple, None, col, b)
+}

rule finally_block() -> Finally<'input, 'a>
= kw:lit("finally") col:lit(":") b:block() {

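The two new alternatives above wrap comma-separated exception expressions in a `Tuple` node, which is the unparenthesized `except` / `except*` form Python 3.14 accepts (PEP 758). A minimal sketch of source these rules are meant to parse, runnable on 3.14 or later:

```python
# Unparenthesized exception lists (PEP 758); equivalent to the tuple forms.
try:
    pass
except ValueError, KeyError:    # same as: except (ValueError, KeyError):
    pass

try:
    pass
except* ValueError, KeyError:   # same treatment for exception groups
    pass
```
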
@@ -1043,7 +1064,7 @@ parser! {
/ n:lit("True") { Expression::Name(Box::new(make_name(n))) }
/ n:lit("False") { Expression::Name(Box::new(make_name(n))) }
/ n:lit("None") { Expression::Name(Box::new(make_name(n))) }
-/ &(tok(STRING, "") / tok(FStringStart, "")) s:strings() {s.into()}
+/ &(tok(STRING, "") / tok(FStringStart, "") / tok(TStringStart, "")) s:strings() {s.into()}
/ n:tok(Number, "NUMBER") { make_number(n) }
/ &lit("(") e:(tuple() / group() / (g:genexp() {Expression::GeneratorExp(Box::new(g))})) {e}
/ &lit("[") e:(list() / listcomp()) {e}

@@ -1151,7 +1172,7 @@ parser! {

rule strings() -> String<'input, 'a>
= s:(str:tok(STRING, "STRING") t:&_ {(make_string(str), t)}
-/ str:fstring() t:&_ {(String::Formatted(str), t)})+ {?
+/ str:fstring() t:&_ {(String::Formatted(str), t)} / str:tstring() t:&_ {(String::Templated(str), t)})+ {?
make_strings(s)
}

@@ -1463,6 +1484,34 @@ parser! {
rule _f_spec() -> Vec<FormattedStringContent<'input, 'a>>
= (_f_string() / _f_replacement())*

+// T-strings
+
+rule tstring() -> TemplatedString<'input, 'a>
+= start:tok(TStringStart, "t\"")
+parts:(_t_string() / _t_replacement())*
+end:tok(TStringEnd, "\"") {
+make_tstring(start.string, parts, end.string)
+}
+
+rule _t_string() -> TemplatedStringContent<'input, 'a>
+= t:tok(TStringString, "t-string contents") {
+TemplatedStringContent::Text(make_tstringtext(t.string))
+}
+
+
+rule _t_replacement() -> TemplatedStringContent<'input, 'a>
+= lb:lit("{") e:annotated_rhs() eq:lit("=")?
+conv:(t:lit("!") c:_f_conversion() {(t,c)})?
+spec:(t:lit(":") s:_t_spec() {(t,s)})?
+rb:lit("}") {
+TemplatedStringContent::Expression(Box::new(
+make_tstring_expression(lb, e, eq, conv, spec, rb)
+))
+}
+
+rule _t_spec() -> Vec<TemplatedStringContent<'input, 'a>>
+= (_t_string() / _t_replacement())*
+
// CST helpers

rule comma() -> Comma<'input, 'a>

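For context, these rules cover the template-string literals added by PEP 750 in Python 3.14: the same `{...}` replacement-field grammar as f-strings (hence the reuse of `_f_conversion`), but the literal evaluates to a `string.templatelib.Template` instead of a `str`. A minimal sketch, runnable on 3.14:

```python
# Python 3.14+; `name` is just an illustrative local variable.
from string.templatelib import Template

name = "world"
greeting = t"Hello {name}!"
assert isinstance(greeting, Template)

# Conversions and format specs reuse the f-string `!conv` / `:spec` syntax.
detailed = t"value={name!r:>10}"
```
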
@@ -1520,22 +1569,22 @@ parser! {
rule separated<El, Sep>(el: rule<El>, sep: rule<Sep>) -> (El, Vec<(Sep, El)>)
= e:el() rest:(s:sep() e:el() {(s, e)})* {(e, rest)}

-rule traced<T>(e: rule<T>) -> T =
-&(_* {
+rule traced<T>(e: rule<T>) -> T =
+&(_* {
#[cfg(feature = "trace")]
{
println!("[PEG_INPUT_START]");
println!("{}", input);
println!("[PEG_TRACE_START]");
}
-})
-e:e()? {?
+})
+e:e()? {?
#[cfg(feature = "trace")]
-println!("[PEG_TRACE_STOP]");
-e.ok_or("")
-}
+println!("[PEG_TRACE_STOP]");
+e.ok_or("")
+}

}
}
}

#[allow(clippy::too_many_arguments)]

@@ -2877,6 +2926,48 @@ fn make_strings<'input, 'a>(
}))
}

+fn make_tstring_expression<'input, 'a>(
+lbrace_tok: TokenRef<'input, 'a>,
+expression: Expression<'input, 'a>,
+eq: Option<TokenRef<'input, 'a>>,
+conversion_pair: Option<(TokenRef<'input, 'a>, &'a str)>,
+format_pair: Option<(
+TokenRef<'input, 'a>,
+Vec<TemplatedStringContent<'input, 'a>>,
+)>,
+rbrace_tok: TokenRef<'input, 'a>,
+) -> TemplatedStringExpression<'input, 'a> {
+let equal: Option<AssignEqual<'_, '_>> = eq.map(make_assign_equal);
+let (conversion_tok, conversion) = if let Some((t, c)) = conversion_pair {
+(Some(t), Some(c))
+} else {
+(None, None)
+};
+let (format_tok, format_spec) = if let Some((t, f)) = format_pair {
+(Some(t), Some(f))
+} else {
+(None, None)
+};
+let after_expr_tok = if equal.is_some() {
+None
+} else if let Some(tok) = conversion_tok {
+Some(tok)
+} else if let Some(tok) = format_tok {
+Some(tok)
+} else {
+Some(rbrace_tok)
+};
+
+TemplatedStringExpression {
+expression,
+conversion,
+format_spec,
+equal,
+lbrace_tok,
+after_expr_tok,
+}
+}
+
fn make_fstring_expression<'input, 'a>(
lbrace_tok: TokenRef<'input, 'a>,
expression: Expression<'input, 'a>,

@@ -2933,6 +3024,20 @@ fn make_fstring<'input, 'a>(
}
}

+fn make_tstring<'input, 'a>(
+start: &'a str,
+parts: Vec<TemplatedStringContent<'input, 'a>>,
+end: &'a str,
+) -> TemplatedString<'input, 'a> {
+TemplatedString {
+start,
+parts,
+end,
+lpar: Default::default(),
+rpar: Default::default(),
+}
+}
+
fn make_finally<'input, 'a>(
finally_tok: TokenRef<'input, 'a>,
colon_tok: TokenRef<'input, 'a>,

@@ -11,21 +11,21 @@ use pyo3::prelude::*;
pub fn libcst_native(_py: Python, m: &Bound<PyModule>) -> PyResult<()> {
#[pyfn(m)]
#[pyo3(signature = (source, encoding=None))]
-fn parse_module(source: String, encoding: Option<&str>) -> PyResult<PyObject> {
+fn parse_module(source: String, encoding: Option<&str>) -> PyResult<Py<PyAny>> {
let m = crate::parse_module(source.as_str(), encoding)?;
-Python::with_gil(|py| m.try_into_py(py))
+Python::attach(|py| m.try_into_py(py))
}

#[pyfn(m)]
-fn parse_expression(source: String) -> PyResult<PyObject> {
+fn parse_expression(source: String) -> PyResult<Py<PyAny>> {
let expr = crate::parse_expression(source.as_str())?;
-Python::with_gil(|py| expr.try_into_py(py))
+Python::attach(|py| expr.try_into_py(py))
}

#[pyfn(m)]
-fn parse_statement(source: String) -> PyResult<PyObject> {
+fn parse_statement(source: String) -> PyResult<Py<PyAny>> {
let stm = crate::parse_statement(source.as_str())?;
-Python::with_gil(|py| stm.try_into_py(py))
+Python::attach(|py| stm.try_into_py(py))
}

Ok(())

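A quick way to exercise these bindings end to end is through the public `libcst` API, which routes to the native parser; a hedged sketch, assuming a build that includes this change and Python 3.14 syntax in the source string:

```python
import libcst as cst

# parse_module / parse_expression / parse_statement wrap the #[pyfn]s above.
module = cst.parse_module('t"Hello {name}"\n')
print(module.code)    # LibCST round-trips the original source exactly
expr = cst.parse_expression("1 + 1")
stmt = cst.parse_statement("pass\n")
```
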
@@ -66,8 +66,9 @@ use std::fmt::Debug;
use std::fmt::Formatter;
use std::rc::Rc;

+use crate::tokenizer::core::string_types::FTStringType;
use crate::tokenizer::{
-core::string_types::{FStringNode, StringQuoteChar, StringQuoteSize},
+core::string_types::{FTStringNode, StringQuoteChar, StringQuoteSize},
operators::OPERATOR_RE,
text_position::{TextPosition, TextPositionSnapshot},
whitespace_parser::State as WhitespaceState,

@@ -86,7 +87,7 @@ thread_local! {
static SPACE_TAB_FORMFEED_RE: Regex = Regex::new(r"\A[ \f\t]+").expect("regex");
static ANY_NON_NEWLINE_RE: Regex = Regex::new(r"\A[^\r\n]+").expect("regex");
static STRING_PREFIX_RE: Regex =
-Regex::new(r"\A(?i)(u|[bf]r|r[bf]|r|b|f)").expect("regex");
+Regex::new(r"\A(?i)(u|[bf]r|r[bft]|r|b|f|t)").expect("regex");
static POTENTIAL_IDENTIFIER_TAIL_RE: Regex =
Regex::new(r"\A([a-zA-Z0-9_]|[^\x00-\x7f])+").expect("regex");
static DECIMAL_DOT_DIGIT_RE: Regex = Regex::new(r"\A\.[0-9]").expect("regex");

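The widened character class teaches the prefix matcher about `t` in the same way it already knows `f`, including the raw form `rt`; a few purely illustrative Python 3.14 literals the new pattern admits:

```python
plain = t"{1 + 1}"            # bare t prefix, tokenized like f"..."
raw   = rt"no \n escape {1}"  # raw t-string: the backslash stays literal
upper = T"{1 + 1}"            # the regex match is case-insensitive
```
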
@@ -118,6 +119,9 @@ pub enum TokType {
FStringStart,
FStringString,
FStringEnd,
+TStringStart,
+TStringString,
+TStringEnd,
EndMarker,
}

@@ -222,8 +226,8 @@ pub struct TokState<'t> {
///
/// Supporting this at the tokenizer-level is pretty nasty and adds a lot of complexity.
/// Eventually, we should probably support this at the parser-level instead.
-split_fstring: bool,
-fstring_stack: Vec<FStringNode>,
+split_ftstring: bool,
+ftstring_stack: Vec<FTStringNode>,

missing_nl_before_eof: bool,
}

@@ -233,7 +237,7 @@ pub struct TokConfig {
/// identifiers, depending on if they're being used in the context of an async function. This
/// breaks async comprehensions outside of async functions.
pub async_hacks: bool,
-pub split_fstring: bool,
+pub split_ftstring: bool,
// Not currently supported:
// type_comments: bool,
}

@@ -272,8 +276,8 @@ impl<'t> TokState<'t> {
async_def: false,
async_def_indent: 0,
async_def_nl: false,
-split_fstring: config.split_fstring,
-fstring_stack: Vec::new(),
+split_ftstring: config.split_ftstring,
+ftstring_stack: Vec::new(),
missing_nl_before_eof: text.is_empty() || text.as_bytes()[text.len() - 1] != b'\n',
}
}

@@ -285,18 +289,18 @@ impl<'t> TokState<'t> {
/// Implementation of `next()`, wrapped by next() to allow for easier error handling. Roughly
/// equivalent to `tok_get` in the C source code.
fn next_inner(&mut self) -> Result<TokType, TokError<'t>> {
-if self.split_fstring {
-if let Some(tos) = self.fstring_stack.last() {
+if self.split_ftstring {
+if let Some(tos) = self.ftstring_stack.last() {
if !tos.is_in_expr() {
self.start_pos = (&self.text_pos).into();
let is_in_format_spec = tos.is_in_format_spec();
let is_raw_string = tos.is_raw_string;
if let Some(tok) =
-self.maybe_consume_fstring_string(is_in_format_spec, is_raw_string)?
+self.maybe_consume_ftstring_string(is_in_format_spec, is_raw_string)?
{
return Ok(tok);
}
-if let Some(tok) = self.maybe_consume_fstring_end() {
+if let Some(tok) = self.maybe_consume_ftstring_end() {
return Ok(tok);
}
}

@@ -362,8 +366,11 @@ impl<'t> TokState<'t> {
Some('\n') => {
self.text_pos.next();
self.at_bol = true;
-if self.split_fstring
-&& self.fstring_stack.last().map(|node| node.allow_multiline())
+if self.split_ftstring
+&& self
+.ftstring_stack
+.last()
+.map(|node| node.allow_multiline())
== Some(false)
{
Err(TokError::UnterminatedString)

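The `allow_multiline()` guard above is what makes a bare newline fatal inside a single-quoted f/t-string while triple-quoted ones may span lines; in Python terms:

```python
ok = t"""one
two {1 + 1}"""   # triple-quoted: the embedded newline is allowed

# t"one            <- a literal newline inside a single-quoted t-string
# two"                is a SyntaxError (unterminated string)
```
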
@@ -420,7 +427,7 @@ impl<'t> TokState<'t> {

Some(ch @ '(') | Some(ch @ '[') | Some(ch @ '{') => {
self.text_pos.next();
-if let Some(tos) = self.fstring_stack.last_mut() {
+if let Some(tos) = self.ftstring_stack.last_mut() {
tos.open_parentheses();
}
self.paren_stack.push((ch, self.text_pos.line_number()));

@@ -429,7 +436,7 @@ impl<'t> TokState<'t> {

Some(closing @ ')') | Some(closing @ ']') | Some(closing @ '}') => {
self.text_pos.next();
-if let Some(tos) = self.fstring_stack.last_mut() {
+if let Some(tos) = self.ftstring_stack.last_mut() {
tos.close_parentheses();
}
if let Some((opening, line_number)) = self.paren_stack.pop() {

@@ -454,7 +461,7 @@ impl<'t> TokState<'t> {

Some(':')
if self
-.fstring_stack
+.ftstring_stack
.last()
.map(|tos| tos.parentheses_count - tos.format_spec_count == 1)
.unwrap_or(false) =>

@@ -465,9 +472,9 @@ impl<'t> TokState<'t> {
//
// >>> f'{x:=10}' # Valid, passes '=10' to formatter
let tos = self
-.fstring_stack
+.ftstring_stack
.last_mut()
-.expect("fstring_stack is not empty");
+.expect("ftstring_stack is not empty");
tos.format_spec_count += 1;
self.text_pos.next();
Ok(TokType::Op)

@@ -624,20 +631,27 @@ impl<'t> TokState<'t> {
}

fn consume_identifier_or_prefixed_string(&mut self) -> Result<TokType, TokError<'t>> {
-// Process the various legal combinations of b"", r"", u"", and f"".
+// Process the various legal combinations of b"", r"", u"",f"", and t"".
if STRING_PREFIX_RE.with(|r| self.text_pos.consume(r)) {
if let Some('"') | Some('\'') = self.text_pos.peek() {
// We found a string, not an identifier. Bail!
-if self.split_fstring
-&& self
+if self.split_ftstring {
+let res = match self
.text_pos
.slice_from_start_pos(&self.start_pos)
-.contains(&['f', 'F'][..])
-{
-return self.consume_fstring_start();
-} else {
-return self.consume_string();
+.chars()
+.find(|c| matches!(c, 'f' | 'F' | 't' | 'T'))
+{
+Some('f' | 'F') => Some(FTStringType::FString),
+Some('t' | 'T') => Some(FTStringType::TString),
+_ => None,
+};
+if let Some(str_type) = res {
+// Consume the prefix and return the start token
+return self.consume_prefixed_string_start(str_type);
+}
+}
+return self.consume_string();
}
} else {
// the next character must be a potential identifier start, aka `[a-zA-Z_]|[^\x00-\x7f]`

@@ -880,24 +894,43 @@ impl<'t> TokState<'t> {
Ok(TokType::String)
}

-fn consume_fstring_start(&mut self) -> Result<TokType, TokError<'t>> {
+fn consume_prefixed_string_start(
+&mut self,
+str_type: FTStringType,
+) -> Result<TokType, TokError<'t>> {
+// Consumes everything after the (f|t) but before the actual string.
let (quote_char, quote_size) = self.consume_open_quote();
let is_raw_string = self
.text_pos
.slice_from_start_pos(&self.start_pos)
.contains(&['r', 'R'][..]);
-self.fstring_stack
-.push(FStringNode::new(quote_char, quote_size, is_raw_string));
-Ok(TokType::FStringStart)
+self.ftstring_stack.push(FTStringNode::new(
+quote_char,
+quote_size,
+is_raw_string,
+str_type.clone(),
+));
+
+match str_type {
+FTStringType::FString => Ok(TokType::FStringStart),
+FTStringType::TString => Ok(TokType::TStringStart),
+}
}

-fn maybe_consume_fstring_string(
+fn maybe_consume_ftstring_string(
&mut self,
is_in_format_spec: bool,
is_raw_string: bool,
) -> Result<Option<TokType>, TokError<'t>> {
-let allow_multiline =
-self.fstring_stack.last().map(|node| node.allow_multiline()) == Some(true);
+let allow_multiline = self
+.ftstring_stack
+.last()
+.map(|node| node.allow_multiline())
+== Some(true);
+let str_type = self
+.ftstring_stack
+.last()
+.map(|node| node.string_type.clone());
let mut in_named_unicode: bool = false;
let mut ok_result = Ok(None); // value to return if we reach the end and don't error out
'outer: loop {

@@ -910,7 +943,7 @@ impl<'t> TokState<'t> {
}
(ch @ Some('\''), _) | (ch @ Some('"'), _) => {
// see if this actually terminates the most recent fstring
-if let Some(node) = self.fstring_stack.last() {
+if let Some(node) = self.ftstring_stack.last() {
if ch == Some(node.quote_char.into()) {
match node.quote_size {
StringQuoteSize::Single => {

@@ -999,22 +1032,30 @@ impl<'t> TokState<'t> {
self.text_pos.next();
}
}
-ok_result = Ok(Some(TokType::FStringString));
+ok_result = match str_type {
+Some(FTStringType::FString) => Ok(Some(TokType::FStringString)),
+Some(FTStringType::TString) => Ok(Some(TokType::TStringString)),
+None => unreachable!("We should always have a string type"),
+};
}
ok_result
}

-fn maybe_consume_fstring_end(&mut self) -> Option<TokType> {
+fn maybe_consume_ftstring_end(&mut self) -> Option<TokType> {
let ch = self.text_pos.peek();
-if let Some(node) = self.fstring_stack.last() {
+if let Some(node) = self.ftstring_stack.last() {
if ch == Some(node.quote_char.into()) {
if node.quote_size == StringQuoteSize::Triple {
self.text_pos.consume(node.quote_char.triple_str());
} else {
self.text_pos.next(); // already matched
}
-self.fstring_stack.pop();
-return Some(TokType::FStringEnd);
+let tok_type = match node.string_type {
+FTStringType::FString => TokType::FStringEnd,
+FTStringType::TString => TokType::TStringEnd,
+};
+self.ftstring_stack.pop();
+return Some(tok_type);
}
}
None

@@ -67,7 +67,13 @@ impl TryFrom<Option<char>> for StringQuoteChar {
}

#[derive(Clone)]
-pub struct FStringNode {
+pub enum FTStringType {
+FString,
+TString,
+}
+
+#[derive(Clone)]
+pub struct FTStringNode {
pub quote_char: StringQuoteChar,
pub quote_size: StringQuoteSize,
pub parentheses_count: usize,

@@ -75,13 +81,16 @@ pub struct FStringNode {
// In the syntax there can be multiple format_spec's nested: {x:{y:3}}
pub format_spec_count: usize,
pub is_raw_string: bool,
+// ftstring type; either f-string or a t-string
+pub string_type: FTStringType,
}

-impl FStringNode {
+impl FTStringNode {
pub fn new(
quote_char: StringQuoteChar,
quote_size: StringQuoteSize,
is_raw_string: bool,
+string_type: FTStringType,
) -> Self {
Self {
quote_char,

@@ -90,6 +99,7 @@ impl FStringNode {
string_start: None,
format_spec_count: 0,
is_raw_string,
+string_type,
}
}

@@ -60,7 +60,7 @@ pub const OPERATORS: &[&str] = &[
"->", // RARROW
"...", // ELLIPSIS
":=", // COLONEQUAL
-// Not a real operator, but needed to support the split_fstring feature
+// Not a real operator, but needed to support the split_ftstring feature
"!",
// The fake operator added by PEP 401. Technically only valid if used with:
//

@@ -11,7 +11,7 @@ use crate::tokenizer::core::{TokConfig, TokError, TokState, TokType};
fn default_config() -> TokConfig {
TokConfig {
async_hacks: false,
-split_fstring: false,
+split_ftstring: false,
}
}

@@ -534,7 +534,7 @@ fn test_string_prefix() {
Ok(vec![(TokType::String, r#"r'\\'"#)]),
);
let config = TokConfig {
-split_fstring: true,
+split_ftstring: true,
..default_config()
};
assert_eq!(

@@ -564,9 +564,9 @@ fn test_string_prefix() {
}

#[test]
-fn test_split_fstring() {
+fn test_split_ftstring() {
let config = TokConfig {
-split_fstring: true,
+split_ftstring: true,
..default_config()
};

@@ -662,7 +662,7 @@ fn test_split_fstring() {
#[test]
fn test_fstring_escapes() {
let config = TokConfig {
-split_fstring: true,
+split_ftstring: true,
..default_config()
};
assert_eq!(

@@ -831,7 +831,7 @@ fn test_inconsistent_indentation_at_eof() {
#[test]
fn test_nested_f_string_specs() {
let config = TokConfig {
-split_fstring: true,
+split_ftstring: true,
..default_config()
};
assert_eq!(

@@ -857,7 +857,7 @@ fn test_nested_f_string_specs() {
#[test]
fn test_nested_f_strings() {
let config = TokConfig {
-split_fstring: true,
+split_ftstring: true,
..default_config()
};
assert_eq!(

@@ -875,3 +875,45 @@ fn test_nested_f_strings() {
])
)
}
+#[test]
+fn test_can_tokenize_t_string_basic() {
+let config = TokConfig {
+split_ftstring: true,
+..default_config()
+};
+assert_eq!(
+tokenize_all("t'Nothing to see here, move along'", &config),
+Ok(vec![
+(TokType::TStringStart, "t'"),
+(TokType::TStringString, "Nothing to see here, move along"),
+(TokType::TStringEnd, "'")
+])
+)
+}
+#[test]
+fn test_can_tokenize_f_and_t_strings() {
+let config = TokConfig {
+split_ftstring: true,
+..default_config()
+};
+assert_eq!(
+tokenize_all("t\"TMiddle{f'FMiddle{t'{2}'}'}\"", &config),
+Ok(vec![
+(TokType::TStringStart, "t\""),
+(TokType::TStringString, "TMiddle"),
+(TokType::Op, "{"),
+(TokType::FStringStart, "f'"),
+(TokType::FStringString, "FMiddle"),
+(TokType::Op, "{"),
+(TokType::TStringStart, "t'"),
+(TokType::Op, "{"),
+(TokType::Number, "2"),
+(TokType::Op, "}"),
+(TokType::TStringEnd, "'"),
+(TokType::Op, "}"),
+(TokType::FStringEnd, "'"),
+(TokType::Op, "}"),
+(TokType::TStringEnd, "\"")
+])
+)
+}

@@ -48,3 +48,11 @@ f'some words {a+b:.3f} more words {c+d=} final words'
f"{'':*^{1:{1}}}"
f"{'':*^{1:{1:{1}}}}"
f"{f"{f"{f"{f"{f"{1+1}"}"}"}"}"}"
+
+
+t'Nothing to see here, move along'
+t"User {action}: {amount:.2f} {item}"
+t"<p>HTML is code {too}</p>"
+t"value={value!r}"
+t"This wrinkles my brain {value:.{precision}f}"
+_ = t"everything" + t" is {tstrings}"


22  native/libcst/tests/fixtures/terrible_tries.py  (vendored)

@@ -69,3 +69,25 @@ except foo:
pass

#9

+try:
+pass
+except (foo, bar):
+pass
+
+try:
+pass
+except foo, bar:
+pass
+
+try:
+pass
+except (foo, bar), baz:
+pass
+else:
+pass
+
+try:
+pass
+except* something, somethingelse:
+pass

@@ -1,6 +1,6 @@
[package]
name = "libcst_derive"
-version = "1.8.2"
+version = "1.8.6"
edition = "2018"
description = "Proc macro helpers for libcst."
license = "MIT"

@@ -3,7 +3,7 @@ requires = ["setuptools", "setuptools-scm", "setuptools-rust", "wheel"]

[project]
name = "libcst"
-description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.13 programs."
+description = "A concrete syntax tree with AST-like properties for Python 3.0 through 3.14 programs."
readme = "README.rst"
dynamic = ["version"]
license = { file = "LICENSE" }

@@ -15,12 +15,15 @@ classifiers = [
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
+"Programming Language :: Python :: 3.14",
+"Programming Language :: Python :: Free Threading",
"Typing :: Typed",
]
requires-python = ">=3.9"
dependencies = [
"pyyaml>=5.2; python_version < '3.13'",
-"pyyaml-ft>=8.0.0; python_version >= '3.13'",
+"pyyaml-ft>=8.0.0; python_version == '3.13'",
+"pyyaml>=6.0.3; python_version >= '3.14'",
"typing-extensions; python_version < '3.10'",
]

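A hedged sketch of how these environment markers partition interpreters, using the third-party `packaging` library purely as an illustration (it is not a project dependency):

```python
from packaging.markers import Marker

requirements = {
    "pyyaml>=5.2": "python_version < '3.13'",
    "pyyaml-ft>=8.0.0": "python_version == '3.13'",
    "pyyaml>=6.0.3": "python_version >= '3.14'",
}
for version in ("3.12", "3.13", "3.14"):
    env = {"python_version": version}
    picked = [req for req, m in requirements.items() if Marker(m).evaluate(env)]
    print(version, picked)  # each interpreter resolves exactly one YAML backend
```
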
@@ -66,6 +69,7 @@ fail_under = 93
precision = 1
show_missing = true
skip_covered = true
+omit = ["*/_parser/*"] # temporary while I remove the parser

[tool.uv]
cache-keys = [

@@ -109,6 +113,12 @@ skip = [
"*-musllinux_armv7l",
]
enable = ["cpython-freethreading"]
+test-command = [
+"python --version",
+"python -m libcst.tool list",
+# TODO: remove the gil once thread-safety issues are resolved
+"python -X gil=1 -m libcst.tool codemod remove_unused_imports.RemoveUnusedImportsCommand {project}/libcst/_nodes",
+]

[tool.cibuildwheel.linux]
environment-pass = ["LIBCST_NO_LOCAL_SCHEME"]


111  uv.lock  (generated)

@@ -2,7 +2,8 @@ version = 1
revision = 2
requires-python = ">=3.9"
resolution-markers = [
-"python_full_version >= '3.13'",
+"python_full_version >= '3.14'",
+"python_full_version == '3.13.*'",
"python_full_version >= '3.11' and python_full_version < '3.13'",
"python_full_version == '3.10.*'",
"python_full_version < '3.10'",

@@ -25,7 +26,8 @@ name = "alabaster"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
-"python_full_version >= '3.13'",
+"python_full_version >= '3.14'",
+"python_full_version == '3.13.*'",
"python_full_version >= '3.11' and python_full_version < '3.13'",
"python_full_version == '3.10.*'",
]

@@ -400,7 +402,8 @@ name = "click"
version = "8.2.1"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
-"python_full_version >= '3.13'",
+"python_full_version >= '3.14'",
+"python_full_version == '3.13.*'",
"python_full_version >= '3.11' and python_full_version < '3.13'",
"python_full_version == '3.10.*'",
]

@@ -841,7 +844,8 @@ name = "ipython"
version = "9.3.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
-"python_full_version >= '3.13'",
+"python_full_version >= '3.14'",
+"python_full_version == '3.13.*'",
"python_full_version >= '3.11' and python_full_version < '3.13'",
]
dependencies = [

@@ -1062,7 +1066,8 @@ dependencies = [
{ name = "jsonschema", extra = ["format-nongpl"] },
{ name = "packaging" },
{ name = "python-json-logger" },
-{ name = "pyyaml" },
+{ name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" },
+{ name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" },
{ name = "referencing" },
{ name = "rfc3339-validator" },
{ name = "rfc3986-validator" },

@@ -1205,8 +1210,9 @@ wheels = [
name = "libcst"
source = { editable = "." }
dependencies = [
-{ name = "pyyaml", marker = "python_full_version < '3.13'" },
-{ name = "pyyaml-ft", marker = "python_full_version >= '3.13'" },
+{ name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.13'" },
+{ name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" },
+{ name = "pyyaml-ft", marker = "python_full_version == '3.13.*'" },
{ name = "typing-extensions", marker = "python_full_version < '3.10'" },
]

@@ -1257,7 +1263,8 @@ docs = [
[package.metadata]
requires-dist = [
{ name = "pyyaml", marker = "python_full_version < '3.13'", specifier = ">=5.2" },
-{ name = "pyyaml-ft", marker = "python_full_version >= '3.13'", specifier = ">=8.0.0" },
+{ name = "pyyaml", marker = "python_full_version >= '3.14'", specifier = ">=6.0.3" },
+{ name = "pyyaml-ft", marker = "python_full_version == '3.13.*'", specifier = ">=8.0.0" },
{ name = "typing-extensions", marker = "python_full_version < '3.10'" },
]

@@ -1654,7 +1661,8 @@ version = "0.35.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pastel" },
-{ name = "pyyaml" },
+{ name = "pyyaml", version = "6.0.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.14'" },
+{ name = "pyyaml", version = "6.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.14'" },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d6/b1/d4f4361b278fae10f6074675385ce3acf53c647f8e6eeba22c652f8ba985/poethepoet-0.35.0.tar.gz", hash = "sha256:b396ae862d7626e680bbd0985b423acf71634ce93a32d8b5f38340f44f5fbc3e", size = 66006, upload-time = "2025-06-09T12:58:18.849Z" }

@@ -1859,6 +1867,12 @@ wheels = [
name = "pyyaml"
version = "6.0.2"
source = { registry = "https://pypi.org/simple" }
+resolution-markers = [
+"python_full_version == '3.13.*'",
+"python_full_version >= '3.11' and python_full_version < '3.13'",
+"python_full_version == '3.10.*'",
+"python_full_version < '3.10'",
+]
sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" },
|
@ -1908,6 +1922,82 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.14'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", size = 184450, upload-time = "2025-09-25T21:33:00.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", size = 174319, upload-time = "2025-09-25T21:33:02.086Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", size = 737631, upload-time = "2025-09-25T21:33:03.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", size = 836795, upload-time = "2025-09-25T21:33:05.014Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", size = 750767, upload-time = "2025-09-25T21:33:06.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", size = 727982, upload-time = "2025-09-25T21:33:08.708Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", size = 755677, upload-time = "2025-09-25T21:33:09.876Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/3d/6f5e0d58bd924fb0d06c3a6bad00effbdae2de5adb5cda5648006ffbd8d3/pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0", size = 142592, upload-time = "2025-09-25T21:33:10.983Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/0c/25113e0b5e103d7f1490c0e947e303fe4a696c10b501dea7a9f49d4e876c/pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007", size = 158777, upload-time = "2025-09-25T21:33:15.55Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml-ft"
|
||||
version = "8.0.0"
|
||||
|
|
@@ -2347,7 +2437,8 @@ name = "sphinx"
version = "8.1.3"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
-"python_full_version >= '3.13'",
+"python_full_version >= '3.14'",
+"python_full_version == '3.13.*'",
"python_full_version >= '3.11' and python_full_version < '3.13'",
"python_full_version == '3.10.*'",
]